Move ldexp, scalbn and scalbln folds to match.pd
gcc/builtins.c
/* Expand builtin functions.
   Copyright (C) 1988-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Legacy warning!  Please add no further builtin simplifications here
   (apart from pure constant folding) - builtin simplifications should go
   to match.pd or gimple-fold.c instead.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "predict.h"
#include "tree.h"
#include "gimple.h"
#include "rtl.h"
#include "alias.h"
#include "fold-const.h"
#include "stringpool.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "tree-object-size.h"
#include "realmpfr.h"
#include "cfgrtl.h"
#include "internal-fn.h"
#include "flags.h"
#include "regs.h"
#include "except.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "emit-rtl.h"
#include "stmt.h"
#include "expr.h"
#include "insn-codes.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"
#include "tm_p.h"
#include "target.h"
#include "langhooks.h"
#include "tree-ssanames.h"
#include "tree-dfa.h"
#include "value-prof.h"
#include "diagnostic-core.h"
#include "builtins.h"
#include "asan.h"
#include "cilk.h"
#include "cgraph.h"
#include "tree-chkp.h"
#include "rtl-chkp.h"


static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));

struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN
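
/* For illustration: each DEF_BUILTIN expansion above stringizes the enum
   name, so an entry such as DEF_BUILTIN (BUILT_IN_MEMCPY, ...) in
   builtins.def contributes the string "BUILT_IN_MEMCPY" to
   built_in_names.  */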

/* Set up an array of builtin_info_type; make sure each element's decl is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;

static rtx c_readstr (const char *, machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static void expand_errno_check (tree, rtx);
static rtx expand_builtin_mathfn (tree, rtx, rtx);
static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
static rtx expand_builtin_memcpy_args (tree, tree, tree, rtx, tree);
static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
					machine_mode, int, tree);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree, bool);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree fold_builtin_nan (tree, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_bitop (tree, tree);
static tree fold_builtin_strchr (location_t, tree, tree, tree);
static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_strcmp (location_t, tree, tree);
static tree fold_builtin_strncmp (location_t, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
					enum tree_code);
static tree fold_builtin_0 (location_t, tree);
static tree fold_builtin_1 (location_t, tree, tree);
static tree fold_builtin_2 (location_t, tree, tree, tree);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree*, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strstr (location_t, tree, tree, tree);
static tree fold_builtin_strrchr (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
			  const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_arg2 (tree, tree, tree,
			  int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_arg3 (tree, tree, tree, tree,
			  int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_sincos (tree, tree, tree);
static tree do_mpfr_bessel_n (tree, tree, tree,
			      int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
			      const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);

/* Return true if NAME starts with __builtin_, __sync_ or __atomic_, or
   names one of the Cilk Plus runtime entry points when Cilk Plus is
   enabled.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  if (flag_cilkplus
      && (!strcmp (name, "__cilkrts_detach")
	  || !strcmp (name, "__cilkrts_pop_frame")))
    return true;
  return false;
}
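
/* For example, is_builtin_name returns true for "__builtin_memcpy" and
   "__sync_fetch_and_add", and false for a plain "memcpy".  */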


/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

static bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME, since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}

/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and
   N in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
			     &mode, &unsignedp, &volatilep, true);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
    {
      align = TYPE_ALIGN (TREE_TYPE (exp));
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = ptr_bitmask & -ptr_bitmask;
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, (step & -step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      if (!addr_p && !known_alignment
	  && TYPE_ALIGN (TREE_TYPE (exp)) > align)
	align = TYPE_ALIGN (TREE_TYPE (exp));
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  *alignp = align;
  *bitposp = bitpos & (*alignp - 1);
  return known_alignment;
}

/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Otherwise return false and store BITS_PER_UNIT to *ALIGNP and any
   bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}

/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);
  return align;
}
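
/* As an illustration of the invariant above: if get_object_alignment_1
   reports align == 64 and bitpos == 16 for some object, its address
   satisfies addr % 64 == 16 (in bits), so only 16-bit alignment is
   actually guaranteed, and that is exactly what the bitpos & -bitpos
   reduction returns.  */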

/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Return false if the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* We cannot really tell whether this result is an approximation.  */
	  return true;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}

/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);

  return align;
}

/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  loc = EXPR_LOC_OR_LOC (src, input_location);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! tree_fits_shwi_p (offset_node))
    offset = -1;
  else
    offset = tree_to_shwi (offset_node);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (src))
	{
	  warning_at (loc, 0, "offset outside bounds of constant string");
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
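
/* Example: c_strlen applied to the tree for "hello" yields ssize_int (5),
   while for (i++ ? "foo" : "bar") it yields constant 3 only when
   ONLY_VALUE is nonzero, as described above.  */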

/* Return a char pointer for a C string if it is a string constant
   or sum of string constant and integer constant.  */

const char *
c_getstr (tree src)
{
  tree offset_node;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;

  if (offset_node == 0)
    return TREE_STRING_POINTER (src);
  else if (!tree_fits_uhwi_p (offset_node)
	   || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
    return 0;

  return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
}

/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, machine_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
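
/* For instance, on a little-endian target (neither BYTES_BIG_ENDIAN nor
   WORDS_BIG_ENDIAN), c_readstr ("hell", SImode) places byte I at bit
   position I * 8 and so yields the constant 0x6c6c6568; once the
   terminating NUL is read, CH stays zero and any remaining bytes are
   zero-filled.  */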

/* Cast a target constant CST to target CHAR and, if that value fits into
   the host char type, return zero and put the value into the variable
   pointed to by P.  Otherwise return 1.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
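
/* Illustration: target_char_cast on the INTEGER_CST 65 stores 'A' through
   *P and returns 0.  Failure (return value 1) arises when CST is not an
   INTEGER_CST at all, or when the target character is wider than the host
   one, e.g. the value 0x141 on a target with 16-bit chars cannot be
   represented in an 8-bit host char.  */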

/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}

/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
  if (tem == NULL_RTX)
    {
      /* For a zero count with __builtin_return_address, we don't care what
	 frame address we return, because target-specific definitions will
	 override us.  Therefore frame pointer elimination is OK, and using
	 the soft frame pointer is OK.

	 For a nonzero count, or a zero count with __builtin_frame_address,
	 we require a stable offset from the current frame pointer to the
	 previous one, so we must use the hard frame pointer, and
	 we must disable frame pointer elimination.  */
      if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
	tem = frame_pointer_rtx;
      else
	{
	  tem = hard_frame_pointer_rtx;

	  /* Tell reload not to eliminate the frame pointer.  */
	  crtl->accesses_prior_frames = 1;
	}
    }

  if (count > 0)
    SETUP_FRAME_ADDRESSES ();

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return FRAME_ADDR_RTX (tem);

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
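
/* So, for example, __builtin_return_address (0) expands to a load of the
   return address of the current frame, while __builtin_frame_address (1)
   walks one link up the dynamic chain before applying FRAME_ADDR_RTX.  */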

/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}

/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
  if (! targetm.have_nonlocal_goto ())
    {
      /* First adjust our frame pointer to its actual value.  It was
	 previously set to the start of the virtual area corresponding to
	 the stacked variables when we branched here and now needs to be
	 adjusted to the actual hardware fp value.

	 Assignments to virtual registers are converted by
	 instantiate_virtual_regs into the corresponding assignment
	 to the underlying register (fp in this case) that makes
	 the original assignment true.
	 So the following insn will actually be decrementing fp by
	 STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
	 Mark it used (so that the previous assignment remains live once
	 the frame pointer is eliminated) and clobbered (to represent the
	 implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

  if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }

  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());
  else
    { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}

/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}

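/* Return true if there are more arguments left to visit in the
   call-expression argument iterator ITER.  */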
static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis; otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = false;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  do
    {
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis; any further arguments are all OK.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink; if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  return res;
}
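
/* Usage sketch: a call such as
     validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
   accepts exactly a pointer argument followed by an integer argument,
   while a trailing 0 instead of VOID_TYPE would additionally accept any
   further arguments after those two.  */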

/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}

/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}

/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

  if (targetm.have_prefetch ())
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
	return;
    }

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
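
/* Usage sketch: __builtin_prefetch (p, 1, 3) emits a prefetch of *p for
   writing with maximal temporal locality when the target has a prefetch
   pattern, and degrades to evaluating P for its side effects otherwise,
   exactly as handled above.  */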
1262
1263 /* Get a MEM rtx for expression EXP which is the address of an operand
1264 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1265 the maximum length of the block of memory that might be accessed or
1266 NULL if unknown. */
1267
1268 static rtx
1269 get_memory_rtx (tree exp, tree len)
1270 {
1271 tree orig_exp = exp;
1272 rtx addr, mem;
1273
1274 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1275 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1276 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1277 exp = TREE_OPERAND (exp, 0);
1278
1279 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1280 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1281
1282 /* Get an expression we can use to find the attributes to assign to MEM.
1283 First remove any nops. */
1284 while (CONVERT_EXPR_P (exp)
1285 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1286 exp = TREE_OPERAND (exp, 0);
1287
1288 /* Build a MEM_REF representing the whole accessed area as a byte blob,
1289 (as builtin stringops may alias with anything). */
1290 exp = fold_build2 (MEM_REF,
1291 build_array_type (char_type_node,
1292 build_range_type (sizetype,
1293 size_one_node, len)),
1294 exp, build_int_cst (ptr_type_node, 0));
1295
1296 /* If the MEM_REF has no acceptable address, try to get the base object
1297 from the original address we got, and build an all-aliasing
1298 unknown-sized access to that one. */
1299 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1300 set_mem_attributes (mem, exp, 0);
1301 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1302 && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1303 0))))
1304 {
1305 exp = build_fold_addr_expr (exp);
1306 exp = fold_build2 (MEM_REF,
1307 build_array_type (char_type_node,
1308 build_range_type (sizetype,
1309 size_zero_node,
1310 NULL)),
1311 exp, build_int_cst (ptr_type_node, 0));
1312 set_mem_attributes (mem, exp, 0);
1313 }
1314 set_mem_alias_set (mem, 0);
1315 return mem;
1316 }
1317 \f
1318 /* Built-in functions to perform an untyped call and return. */
1319
1320 #define apply_args_mode \
1321 (this_target_builtins->x_apply_args_mode)
1322 #define apply_result_mode \
1323 (this_target_builtins->x_apply_result_mode)
1324
1325 /* Return the size required for the block returned by __builtin_apply_args,
1326 and initialize apply_args_mode. */
1327
1328 static int
1329 apply_args_size (void)
1330 {
1331 static int size = -1;
1332 int align;
1333 unsigned int regno;
1334 machine_mode mode;
1335
1336 /* The values computed by this function never change. */
1337 if (size < 0)
1338 {
1339 /* The first value is the incoming arg-pointer. */
1340 size = GET_MODE_SIZE (Pmode);
1341
1342 /* The second value is the structure value address unless this is
1343 passed as an "invisible" first argument. */
1344 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1345 size += GET_MODE_SIZE (Pmode);
1346
1347 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1348 if (FUNCTION_ARG_REGNO_P (regno))
1349 {
1350 mode = targetm.calls.get_raw_arg_mode (regno);
1351
1352 gcc_assert (mode != VOIDmode);
1353
1354 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1355 if (size % align != 0)
1356 size = CEIL (size, align) * align;
1357 size += GET_MODE_SIZE (mode);
1358 apply_args_mode[regno] = mode;
1359 }
1360 else
1361 {
1362 apply_args_mode[regno] = VOIDmode;
1363 }
1364 }
1365 return size;
1366 }
1367
1368 /* Return the size required for the block returned by __builtin_apply,
1369 and initialize apply_result_mode. */
1370
1371 static int
1372 apply_result_size (void)
1373 {
1374 static int size = -1;
1375 int align, regno;
1376 machine_mode mode;
1377
1378 /* The values computed by this function never change. */
1379 if (size < 0)
1380 {
1381 size = 0;
1382
1383 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1384 if (targetm.calls.function_value_regno_p (regno))
1385 {
1386 mode = targetm.calls.get_raw_result_mode (regno);
1387
1388 gcc_assert (mode != VOIDmode);
1389
1390 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1391 if (size % align != 0)
1392 size = CEIL (size, align) * align;
1393 size += GET_MODE_SIZE (mode);
1394 apply_result_mode[regno] = mode;
1395 }
1396 else
1397 apply_result_mode[regno] = VOIDmode;
1398
1399 /* Allow targets that use untyped_call and untyped_return to override
1400 the size so that machine-specific information can be stored here. */
1401 #ifdef APPLY_RESULT_SIZE
1402 size = APPLY_RESULT_SIZE;
1403 #endif
1404 }
1405 return size;
1406 }
1407
1408 /* Create a vector describing the result block RESULT. If SAVEP is true,
1409 the result block is used to save the values; otherwise it is used to
1410 restore the values. */
1411
1412 static rtx
1413 result_vector (int savep, rtx result)
1414 {
1415 int regno, size, align, nelts;
1416 machine_mode mode;
1417 rtx reg, mem;
1418 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1419
1420 size = nelts = 0;
1421 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1422 if ((mode = apply_result_mode[regno]) != VOIDmode)
1423 {
1424 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1425 if (size % align != 0)
1426 size = CEIL (size, align) * align;
1427 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1428 mem = adjust_address (result, mode, size);
1429 savevec[nelts++] = (savep
1430 ? gen_rtx_SET (mem, reg)
1431 : gen_rtx_SET (reg, mem));
1432 size += GET_MODE_SIZE (mode);
1433 }
1434 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1435 }
1436
1437 /* Save the state required to perform an untyped call with the same
1438 arguments as were passed to the current function. */
1439
1440 static rtx
1441 expand_builtin_apply_args_1 (void)
1442 {
1443 rtx registers, tem;
1444 int size, align, regno;
1445 machine_mode mode;
1446 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1447
1448 /* Create a block where the arg-pointer, structure value address,
1449 and argument registers can be saved. */
1450 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1451
1452 /* Walk past the arg-pointer and structure value address. */
1453 size = GET_MODE_SIZE (Pmode);
1454 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1455 size += GET_MODE_SIZE (Pmode);
1456
1457 /* Save each register used in calling a function to the block. */
1458 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1459 if ((mode = apply_args_mode[regno]) != VOIDmode)
1460 {
1461 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1462 if (size % align != 0)
1463 size = CEIL (size, align) * align;
1464
1465 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1466
1467 emit_move_insn (adjust_address (registers, mode, size), tem);
1468 size += GET_MODE_SIZE (mode);
1469 }
1470
1471 /* Save the arg pointer to the block. */
1472 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1473 /* We need the pointer as the caller actually passed them to us, not
1474 as we might have pretended they were passed. Make sure it's a valid
1475 operand, as emit_move_insn isn't expected to handle a PLUS. */
1476 if (STACK_GROWS_DOWNWARD)
1477 tem
1478 = force_operand (plus_constant (Pmode, tem,
1479 crtl->args.pretend_args_size),
1480 NULL_RTX);
1481 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1482
1483 size = GET_MODE_SIZE (Pmode);
1484
1485 /* Save the structure value address unless this is passed as an
1486 "invisible" first argument. */
1487 if (struct_incoming_value)
1488 {
1489 emit_move_insn (adjust_address (registers, Pmode, size),
1490 copy_to_reg (struct_incoming_value));
1491 size += GET_MODE_SIZE (Pmode);
1492 }
1493
1494 /* Return the address of the block. */
1495 return copy_addr_to_reg (XEXP (registers, 0));
1496 }
1497
1498 /* __builtin_apply_args returns block of memory allocated on
1499 the stack into which is stored the arg pointer, structure
1500 value address, static chain, and all the registers that might
1501 possibly be used in performing a function call. The code is
1502 moved to the start of the function so the incoming values are
1503 saved. */
1504
1505 static rtx
1506 expand_builtin_apply_args (void)
1507 {
1508 /* Don't do __builtin_apply_args more than once in a function.
1509 Save the result of the first call and reuse it. */
1510 if (apply_args_value != 0)
1511 return apply_args_value;
1512 {
1513 /* When this function is called, it means that registers must be
1514 saved on entry to this function. So we migrate the
1515 call to the first insn of this function. */
1516 rtx temp;
1517
1518 start_sequence ();
1519 temp = expand_builtin_apply_args_1 ();
1520 rtx_insn *seq = get_insns ();
1521 end_sequence ();
1522
1523 apply_args_value = temp;
1524
1525 /* Put the insns after the NOTE that starts the function.
1526 If this is inside a start_sequence, make the outer-level insn
1527 chain current, so the code is placed at the start of the
1528 function. If internal_arg_pointer is a non-virtual pseudo,
1529 it needs to be placed after the function that initializes
1530 that pseudo. */
1531 push_topmost_sequence ();
1532 if (REG_P (crtl->args.internal_arg_pointer)
1533 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1534 emit_insn_before (seq, parm_birth_insn);
1535 else
1536 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1537 pop_topmost_sequence ();
1538 return temp;
1539 }
1540 }
1541
1542 /* Perform an untyped call and save the state required to perform an
1543 untyped return of whatever value was returned by the given function. */
1544
1545 static rtx
1546 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1547 {
1548 int size, align, regno;
1549 machine_mode mode;
1550 rtx incoming_args, result, reg, dest, src;
1551 rtx_call_insn *call_insn;
1552 rtx old_stack_level = 0;
1553 rtx call_fusage = 0;
1554 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1555
1556 arguments = convert_memory_address (Pmode, arguments);
1557
1558 /* Create a block where the return registers can be saved. */
1559 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1560
1561 /* Fetch the arg pointer from the ARGUMENTS block. */
1562 incoming_args = gen_reg_rtx (Pmode);
1563 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1564 if (!STACK_GROWS_DOWNWARD)
1565 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1566 incoming_args, 0, OPTAB_LIB_WIDEN);
1567
1568 /* Push a new argument block and copy the arguments. Do not allow
1569 the (potential) memcpy call below to interfere with our stack
1570 manipulations. */
1571 do_pending_stack_adjust ();
1572 NO_DEFER_POP;
1573
1574 /* Save the stack with nonlocal if available. */
1575 if (targetm.have_save_stack_nonlocal ())
1576 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1577 else
1578 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1579
1580 /* Allocate a block of memory onto the stack and copy the memory
1581 arguments to the outgoing arguments address. We can pass TRUE
1582 as the 4th argument because we just saved the stack pointer
1583 and will restore it right after the call. */
1584 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1585
1586 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1587 may have already set current_function_calls_alloca to true.
1588 current_function_calls_alloca won't be set if argsize is zero,
1589 so we have to guarantee need_drap is true here. */
1590 if (SUPPORTS_STACK_ALIGNMENT)
1591 crtl->need_drap = true;
1592
1593 dest = virtual_outgoing_args_rtx;
1594 if (!STACK_GROWS_DOWNWARD)
1595 {
1596 if (CONST_INT_P (argsize))
1597 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1598 else
1599 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1600 }
1601 dest = gen_rtx_MEM (BLKmode, dest);
1602 set_mem_align (dest, PARM_BOUNDARY);
1603 src = gen_rtx_MEM (BLKmode, incoming_args);
1604 set_mem_align (src, PARM_BOUNDARY);
1605 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1606
1607 /* Refer to the argument block. */
1608 apply_args_size ();
1609 arguments = gen_rtx_MEM (BLKmode, arguments);
1610 set_mem_align (arguments, PARM_BOUNDARY);
1611
1612 /* Walk past the arg-pointer and structure value address. */
1613 size = GET_MODE_SIZE (Pmode);
1614 if (struct_value)
1615 size += GET_MODE_SIZE (Pmode);
1616
1617 /* Restore each of the registers previously saved. Make USE insns
1618 for each of these registers for use in making the call. */
1619 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1620 if ((mode = apply_args_mode[regno]) != VOIDmode)
1621 {
1622 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1623 if (size % align != 0)
1624 size = CEIL (size, align) * align;
1625 reg = gen_rtx_REG (mode, regno);
1626 emit_move_insn (reg, adjust_address (arguments, mode, size));
1627 use_reg (&call_fusage, reg);
1628 size += GET_MODE_SIZE (mode);
1629 }
1630
1631 /* Restore the structure value address unless this is passed as an
1632 "invisible" first argument. */
1633 size = GET_MODE_SIZE (Pmode);
1634 if (struct_value)
1635 {
1636 rtx value = gen_reg_rtx (Pmode);
1637 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1638 emit_move_insn (struct_value, value);
1639 if (REG_P (struct_value))
1640 use_reg (&call_fusage, struct_value);
1641 size += GET_MODE_SIZE (Pmode);
1642 }
1643
1644 /* All arguments and registers used for the call are set up by now! */
1645 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1646
1647 /* Ensure the address is valid. A SYMBOL_REF is already valid, so nothing
1648 needs to be done, and we don't want to load it into a register as an
1649 optimization, because prepare_call_address already did so if needed. */
1650 if (GET_CODE (function) != SYMBOL_REF)
1651 function = memory_address (FUNCTION_MODE, function);
1652
1653 /* Generate the actual call instruction and save the return value. */
1654 if (targetm.have_untyped_call ())
1655 {
1656 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1657 emit_call_insn (targetm.gen_untyped_call (mem, result,
1658 result_vector (1, result)));
1659 }
1660 else if (targetm.have_call_value ())
1661 {
1662 rtx valreg = 0;
1663
1664 /* Locate the unique return register. It is not possible to
1665 express a call that sets more than one return register using
1666 call_value; use untyped_call for that. In fact, untyped_call
1667 only needs to save the return registers in the given block. */
1668 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1669 if ((mode = apply_result_mode[regno]) != VOIDmode)
1670 {
1671 gcc_assert (!valreg); /* have_untyped_call required. */
1672
1673 valreg = gen_rtx_REG (mode, regno);
1674 }
1675
1676 emit_insn (targetm.gen_call_value (valreg,
1677 gen_rtx_MEM (FUNCTION_MODE, function),
1678 const0_rtx, NULL_RTX, const0_rtx));
1679
1680 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1681 }
1682 else
1683 gcc_unreachable ();
1684
1685 /* Find the CALL insn we just emitted, and attach the register usage
1686 information. */
1687 call_insn = last_call_insn ();
1688 add_function_usage_to (call_insn, call_fusage);
1689
1690 /* Restore the stack. */
1691 if (targetm.have_save_stack_nonlocal ())
1692 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1693 else
1694 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1695 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1696
1697 OK_DEFER_POP;
1698
1699 /* Return the address of the result block. */
1700 result = copy_addr_to_reg (XEXP (result, 0));
1701 return convert_memory_address (ptr_mode, result);
1702 }
1703
1704 /* Perform an untyped return. */
1705
1706 static void
1707 expand_builtin_return (rtx result)
1708 {
1709 int size, align, regno;
1710 machine_mode mode;
1711 rtx reg;
1712 rtx_insn *call_fusage = 0;
1713
1714 result = convert_memory_address (Pmode, result);
1715
1716 apply_result_size ();
1717 result = gen_rtx_MEM (BLKmode, result);
1718
1719 if (targetm.have_untyped_return ())
1720 {
1721 rtx vector = result_vector (0, result);
1722 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1723 emit_barrier ();
1724 return;
1725 }
1726
1727 /* Restore the return value and note that each value is used. */
1728 size = 0;
1729 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1730 if ((mode = apply_result_mode[regno]) != VOIDmode)
1731 {
1732 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1733 if (size % align != 0)
1734 size = CEIL (size, align) * align;
1735 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1736 emit_move_insn (reg, adjust_address (result, mode, size));
1737
1738 push_to_sequence (call_fusage);
1739 emit_use (reg);
1740 call_fusage = get_insns ();
1741 end_sequence ();
1742 size += GET_MODE_SIZE (mode);
1743 }
1744
1745 /* Put the USE insns before the return. */
1746 emit_insn (call_fusage);
1747
1748 /* Return whatever value was restored by jumping directly to the end
1749 of the function. */
1750 expand_naked_return ();
1751 }
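
/* As an illustration of the extension these two expanders implement
   (a sketch; `target_fn' and the 64-byte argument-block size are
   placeholders):

     void forwarder (void)
     {
       void *args = __builtin_apply_args ();
       void *ret = __builtin_apply ((void (*) ()) target_fn, args, 64);
       __builtin_return (ret);
     }

   expand_builtin_apply expands the __builtin_apply call and
   expand_builtin_return expands the __builtin_return.  */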
1752
1753 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1754
1755 static enum type_class
1756 type_to_class (tree type)
1757 {
1758 switch (TREE_CODE (type))
1759 {
1760 case VOID_TYPE: return void_type_class;
1761 case INTEGER_TYPE: return integer_type_class;
1762 case ENUMERAL_TYPE: return enumeral_type_class;
1763 case BOOLEAN_TYPE: return boolean_type_class;
1764 case POINTER_TYPE: return pointer_type_class;
1765 case REFERENCE_TYPE: return reference_type_class;
1766 case OFFSET_TYPE: return offset_type_class;
1767 case REAL_TYPE: return real_type_class;
1768 case COMPLEX_TYPE: return complex_type_class;
1769 case FUNCTION_TYPE: return function_type_class;
1770 case METHOD_TYPE: return method_type_class;
1771 case RECORD_TYPE: return record_type_class;
1772 case UNION_TYPE:
1773 case QUAL_UNION_TYPE: return union_type_class;
1774 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1775 ? string_type_class : array_type_class);
1776 case LANG_TYPE: return lang_type_class;
1777 default: return no_type_class;
1778 }
1779 }
1780
1781 /* Expand a call EXP to __builtin_classify_type. */
1782
1783 static rtx
1784 expand_builtin_classify_type (tree exp)
1785 {
1786 if (call_expr_nargs (exp))
1787 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1788 return GEN_INT (no_type_class);
1789 }
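
/* For example, __builtin_classify_type (1.5) evaluates to
   real_type_class, and __builtin_classify_type ("x") to
   pointer_type_class, since the array argument decays to a pointer;
   with no argument the expansion above yields no_type_class.  */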
1790
1791 /* This helper macro, meant to be used in mathfn_built_in_1 below,
1792 determines which among a set of three builtin math functions is
1793 appropriate for a given type mode. The `F' and `L' cases are
1794 automatically generated from the `double' case. */
1795 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1796 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1797 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1798 fcodel = BUILT_IN_MATHFN##L ; break;
1799 /* Similar to above, but appends _R after any F/L suffix. */
1800 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1801 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1802 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1803 fcodel = BUILT_IN_MATHFN##L_R ; break;
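/* For instance, CASE_MATHFN (BUILT_IN_SIN) expands to:
     case BUILT_IN_SIN: case BUILT_IN_SINF: case BUILT_IN_SINL:
       fcode = BUILT_IN_SIN; fcodef = BUILT_IN_SINF;
       fcodel = BUILT_IN_SINL; break;  */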
1804
1805 /* Return the mathematical function equivalent to FN but operating directly
1806 on TYPE, if available. If IMPLICIT_P is true use the implicit builtin
1807 declaration, otherwise use the explicit declaration. If we can't do the
1808 conversion, return NULL_TREE. */
1809
1810 static tree
1811 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
1812 {
1813 enum built_in_function fcode, fcodef, fcodel, fcode2;
1814
1815 switch (fn)
1816 {
1817 CASE_MATHFN (BUILT_IN_ACOS)
1818 CASE_MATHFN (BUILT_IN_ACOSH)
1819 CASE_MATHFN (BUILT_IN_ASIN)
1820 CASE_MATHFN (BUILT_IN_ASINH)
1821 CASE_MATHFN (BUILT_IN_ATAN)
1822 CASE_MATHFN (BUILT_IN_ATAN2)
1823 CASE_MATHFN (BUILT_IN_ATANH)
1824 CASE_MATHFN (BUILT_IN_CBRT)
1825 CASE_MATHFN (BUILT_IN_CEIL)
1826 CASE_MATHFN (BUILT_IN_CEXPI)
1827 CASE_MATHFN (BUILT_IN_COPYSIGN)
1828 CASE_MATHFN (BUILT_IN_COS)
1829 CASE_MATHFN (BUILT_IN_COSH)
1830 CASE_MATHFN (BUILT_IN_DREM)
1831 CASE_MATHFN (BUILT_IN_ERF)
1832 CASE_MATHFN (BUILT_IN_ERFC)
1833 CASE_MATHFN (BUILT_IN_EXP)
1834 CASE_MATHFN (BUILT_IN_EXP10)
1835 CASE_MATHFN (BUILT_IN_EXP2)
1836 CASE_MATHFN (BUILT_IN_EXPM1)
1837 CASE_MATHFN (BUILT_IN_FABS)
1838 CASE_MATHFN (BUILT_IN_FDIM)
1839 CASE_MATHFN (BUILT_IN_FLOOR)
1840 CASE_MATHFN (BUILT_IN_FMA)
1841 CASE_MATHFN (BUILT_IN_FMAX)
1842 CASE_MATHFN (BUILT_IN_FMIN)
1843 CASE_MATHFN (BUILT_IN_FMOD)
1844 CASE_MATHFN (BUILT_IN_FREXP)
1845 CASE_MATHFN (BUILT_IN_GAMMA)
1846 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1847 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1848 CASE_MATHFN (BUILT_IN_HYPOT)
1849 CASE_MATHFN (BUILT_IN_ILOGB)
1850 CASE_MATHFN (BUILT_IN_ICEIL)
1851 CASE_MATHFN (BUILT_IN_IFLOOR)
1852 CASE_MATHFN (BUILT_IN_INF)
1853 CASE_MATHFN (BUILT_IN_IRINT)
1854 CASE_MATHFN (BUILT_IN_IROUND)
1855 CASE_MATHFN (BUILT_IN_ISINF)
1856 CASE_MATHFN (BUILT_IN_J0)
1857 CASE_MATHFN (BUILT_IN_J1)
1858 CASE_MATHFN (BUILT_IN_JN)
1859 CASE_MATHFN (BUILT_IN_LCEIL)
1860 CASE_MATHFN (BUILT_IN_LDEXP)
1861 CASE_MATHFN (BUILT_IN_LFLOOR)
1862 CASE_MATHFN (BUILT_IN_LGAMMA)
1863 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1864 CASE_MATHFN (BUILT_IN_LLCEIL)
1865 CASE_MATHFN (BUILT_IN_LLFLOOR)
1866 CASE_MATHFN (BUILT_IN_LLRINT)
1867 CASE_MATHFN (BUILT_IN_LLROUND)
1868 CASE_MATHFN (BUILT_IN_LOG)
1869 CASE_MATHFN (BUILT_IN_LOG10)
1870 CASE_MATHFN (BUILT_IN_LOG1P)
1871 CASE_MATHFN (BUILT_IN_LOG2)
1872 CASE_MATHFN (BUILT_IN_LOGB)
1873 CASE_MATHFN (BUILT_IN_LRINT)
1874 CASE_MATHFN (BUILT_IN_LROUND)
1875 CASE_MATHFN (BUILT_IN_MODF)
1876 CASE_MATHFN (BUILT_IN_NAN)
1877 CASE_MATHFN (BUILT_IN_NANS)
1878 CASE_MATHFN (BUILT_IN_NEARBYINT)
1879 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1880 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1881 CASE_MATHFN (BUILT_IN_POW)
1882 CASE_MATHFN (BUILT_IN_POWI)
1883 CASE_MATHFN (BUILT_IN_POW10)
1884 CASE_MATHFN (BUILT_IN_REMAINDER)
1885 CASE_MATHFN (BUILT_IN_REMQUO)
1886 CASE_MATHFN (BUILT_IN_RINT)
1887 CASE_MATHFN (BUILT_IN_ROUND)
1888 CASE_MATHFN (BUILT_IN_SCALB)
1889 CASE_MATHFN (BUILT_IN_SCALBLN)
1890 CASE_MATHFN (BUILT_IN_SCALBN)
1891 CASE_MATHFN (BUILT_IN_SIGNBIT)
1892 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1893 CASE_MATHFN (BUILT_IN_SIN)
1894 CASE_MATHFN (BUILT_IN_SINCOS)
1895 CASE_MATHFN (BUILT_IN_SINH)
1896 CASE_MATHFN (BUILT_IN_SQRT)
1897 CASE_MATHFN (BUILT_IN_TAN)
1898 CASE_MATHFN (BUILT_IN_TANH)
1899 CASE_MATHFN (BUILT_IN_TGAMMA)
1900 CASE_MATHFN (BUILT_IN_TRUNC)
1901 CASE_MATHFN (BUILT_IN_Y0)
1902 CASE_MATHFN (BUILT_IN_Y1)
1903 CASE_MATHFN (BUILT_IN_YN)
1904
1905 default:
1906 return NULL_TREE;
1907 }
1908
1909 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1910 fcode2 = fcode;
1911 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1912 fcode2 = fcodef;
1913 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1914 fcode2 = fcodel;
1915 else
1916 return NULL_TREE;
1917
1918 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1919 return NULL_TREE;
1920
1921 return builtin_decl_explicit (fcode2);
1922 }
1923
1924 /* Like mathfn_built_in_1(), but always use the implicit builtin declarations. */
1925
1926 tree
1927 mathfn_built_in (tree type, enum built_in_function fn)
1928 {
1929 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1930 }
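
/* For example, mathfn_built_in (float_type_node, BUILT_IN_SIN) returns
   the decl for BUILT_IN_SINF when sinf may be used implicitly, and
   NULL_TREE otherwise.  */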
1931
1932 /* If errno must be maintained, expand the RTL to check if the result,
1933 TARGET, of a built-in function call, EXP, is NaN, and if so set
1934 errno to EDOM. */
1935
1936 static void
1937 expand_errno_check (tree exp, rtx target)
1938 {
1939 rtx_code_label *lab = gen_label_rtx ();
1940
1941 /* Test the result; if it is NaN, set errno=EDOM because
1942 the argument was not in the domain. */
1943 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1944 NULL_RTX, NULL, lab,
1945 /* The jump is very likely. */
1946 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
1947
1948 #ifdef TARGET_EDOM
1949 /* If this built-in doesn't throw an exception, set errno directly. */
1950 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1951 {
1952 #ifdef GEN_ERRNO_RTX
1953 rtx errno_rtx = GEN_ERRNO_RTX;
1954 #else
1955 rtx errno_rtx
1956 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1957 #endif
1958 emit_move_insn (errno_rtx,
1959 gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
1960 emit_label (lab);
1961 return;
1962 }
1963 #endif
1964
1965 /* Make sure the library call isn't expanded as a tail call. */
1966 CALL_EXPR_TAILCALL (exp) = 0;
1967
1968 /* We can't set errno=EDOM directly; let the library call do it.
1969 Pop the arguments right away in case the call gets deleted. */
1970 NO_DEFER_POP;
1971 expand_call (exp, target, 0);
1972 OK_DEFER_POP;
1973 emit_label (lab);
1974 }
1975
1976 /* Expand a call to one of the builtin unary math functions (sqrt, exp, log, etc.).
1977 Return NULL_RTX if a normal call should be emitted rather than expanding
1978 the function in-line. EXP is the expression that is a call to the builtin
1979 function; if convenient, the result should be placed in TARGET.
1980 SUBTARGET may be used as the target for computing one of EXP's operands. */
1981
1982 static rtx
1983 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1984 {
1985 optab builtin_optab;
1986 rtx op0;
1987 rtx_insn *insns;
1988 tree fndecl = get_callee_fndecl (exp);
1989 machine_mode mode;
1990 bool errno_set = false;
1991 bool try_widening = false;
1992 tree arg;
1993
1994 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1995 return NULL_RTX;
1996
1997 arg = CALL_EXPR_ARG (exp, 0);
1998
1999 switch (DECL_FUNCTION_CODE (fndecl))
2000 {
2001 CASE_FLT_FN (BUILT_IN_SQRT):
2002 errno_set = ! tree_expr_nonnegative_p (arg);
2003 try_widening = true;
2004 builtin_optab = sqrt_optab;
2005 break;
2006 CASE_FLT_FN (BUILT_IN_EXP):
2007 errno_set = true; builtin_optab = exp_optab; break;
2008 CASE_FLT_FN (BUILT_IN_EXP10):
2009 CASE_FLT_FN (BUILT_IN_POW10):
2010 errno_set = true; builtin_optab = exp10_optab; break;
2011 CASE_FLT_FN (BUILT_IN_EXP2):
2012 errno_set = true; builtin_optab = exp2_optab; break;
2013 CASE_FLT_FN (BUILT_IN_EXPM1):
2014 errno_set = true; builtin_optab = expm1_optab; break;
2015 CASE_FLT_FN (BUILT_IN_LOGB):
2016 errno_set = true; builtin_optab = logb_optab; break;
2017 CASE_FLT_FN (BUILT_IN_LOG):
2018 errno_set = true; builtin_optab = log_optab; break;
2019 CASE_FLT_FN (BUILT_IN_LOG10):
2020 errno_set = true; builtin_optab = log10_optab; break;
2021 CASE_FLT_FN (BUILT_IN_LOG2):
2022 errno_set = true; builtin_optab = log2_optab; break;
2023 CASE_FLT_FN (BUILT_IN_LOG1P):
2024 errno_set = true; builtin_optab = log1p_optab; break;
2025 CASE_FLT_FN (BUILT_IN_ASIN):
2026 builtin_optab = asin_optab; break;
2027 CASE_FLT_FN (BUILT_IN_ACOS):
2028 builtin_optab = acos_optab; break;
2029 CASE_FLT_FN (BUILT_IN_TAN):
2030 builtin_optab = tan_optab; break;
2031 CASE_FLT_FN (BUILT_IN_ATAN):
2032 builtin_optab = atan_optab; break;
2033 CASE_FLT_FN (BUILT_IN_FLOOR):
2034 builtin_optab = floor_optab; break;
2035 CASE_FLT_FN (BUILT_IN_CEIL):
2036 builtin_optab = ceil_optab; break;
2037 CASE_FLT_FN (BUILT_IN_TRUNC):
2038 builtin_optab = btrunc_optab; break;
2039 CASE_FLT_FN (BUILT_IN_ROUND):
2040 builtin_optab = round_optab; break;
2041 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2042 builtin_optab = nearbyint_optab;
2043 if (flag_trapping_math)
2044 break;
2045 /* Else fallthrough and expand as rint. */
2046 CASE_FLT_FN (BUILT_IN_RINT):
2047 builtin_optab = rint_optab; break;
2048 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2049 builtin_optab = significand_optab; break;
2050 default:
2051 gcc_unreachable ();
2052 }
2053
2054 /* Make a suitable register to place result in. */
2055 mode = TYPE_MODE (TREE_TYPE (exp));
2056
2057 if (! flag_errno_math || ! HONOR_NANS (mode))
2058 errno_set = false;
2059
2060 /* Before working hard, check whether the instruction is available, but try
2061 to widen the mode for specific operations. */
2062 if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2063 || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
2064 && (!errno_set || !optimize_insn_for_size_p ()))
2065 {
2066 rtx result = gen_reg_rtx (mode);
2067
2068 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2069 need to expand the argument again. This way, we will not perform
2070 side-effects more than once. */
2071 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2072
2073 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2074
2075 start_sequence ();
2076
2077 /* Compute into RESULT.
2078 Set RESULT to wherever the result comes back. */
2079 result = expand_unop (mode, builtin_optab, op0, result, 0);
2080
2081 if (result != 0)
2082 {
2083 if (errno_set)
2084 expand_errno_check (exp, result);
2085
2086 /* Output the entire sequence. */
2087 insns = get_insns ();
2088 end_sequence ();
2089 emit_insn (insns);
2090 return result;
2091 }
2092
2093 /* If we were unable to expand via the builtin, stop the sequence
2094 (without outputting the insns) and call the library function
2095 with the stabilized argument list. */
2096 end_sequence ();
2097 }
2098
2099 return expand_call (exp, target, target == const0_rtx);
2100 }
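
/* As an illustrative note: with -fno-math-errno on a target whose sqrt
   optab has a handler, __builtin_sqrt (x) expands through expand_unop
   above to a single square-root instruction; with errno handling in
   effect the NaN check from expand_errno_check is emitted as well.  */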
2101
2102 /* Expand a call to one of the builtin binary math functions (pow, atan2, ldexp, fmod, etc.).
2103 Return NULL_RTX if a normal call should be emitted rather than expanding the
2104 function in-line. EXP is the expression that is a call to the builtin
2105 function; if convenient, the result should be placed in TARGET.
2106 SUBTARGET may be used as the target for computing one of EXP's
2107 operands. */
2108
2109 static rtx
2110 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2111 {
2112 optab builtin_optab;
2113 rtx op0, op1, result;
2114 rtx_insn *insns;
2115 int op1_type = REAL_TYPE;
2116 tree fndecl = get_callee_fndecl (exp);
2117 tree arg0, arg1;
2118 machine_mode mode;
2119 bool errno_set = true;
2120
2121 switch (DECL_FUNCTION_CODE (fndecl))
2122 {
2123 CASE_FLT_FN (BUILT_IN_SCALBN):
2124 CASE_FLT_FN (BUILT_IN_SCALBLN):
2125 CASE_FLT_FN (BUILT_IN_LDEXP):
2126 op1_type = INTEGER_TYPE;
2127 default:
2128 break;
2129 }
2130
2131 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2132 return NULL_RTX;
2133
2134 arg0 = CALL_EXPR_ARG (exp, 0);
2135 arg1 = CALL_EXPR_ARG (exp, 1);
2136
2137 switch (DECL_FUNCTION_CODE (fndecl))
2138 {
2139 CASE_FLT_FN (BUILT_IN_POW):
2140 builtin_optab = pow_optab; break;
2141 CASE_FLT_FN (BUILT_IN_ATAN2):
2142 builtin_optab = atan2_optab; break;
2143 CASE_FLT_FN (BUILT_IN_SCALB):
2144 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2145 return 0;
2146 builtin_optab = scalb_optab; break;
2147 CASE_FLT_FN (BUILT_IN_SCALBN):
2148 CASE_FLT_FN (BUILT_IN_SCALBLN):
2149 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2150 return 0;
2151 /* Fall through... */
2152 CASE_FLT_FN (BUILT_IN_LDEXP):
2153 builtin_optab = ldexp_optab; break;
2154 CASE_FLT_FN (BUILT_IN_FMOD):
2155 builtin_optab = fmod_optab; break;
2156 CASE_FLT_FN (BUILT_IN_REMAINDER):
2157 CASE_FLT_FN (BUILT_IN_DREM):
2158 builtin_optab = remainder_optab; break;
2159 default:
2160 gcc_unreachable ();
2161 }
2162
2163 /* Make a suitable register to place result in. */
2164 mode = TYPE_MODE (TREE_TYPE (exp));
2165
2166 /* Before working hard, check whether the instruction is available. */
2167 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2168 return NULL_RTX;
2169
2170 result = gen_reg_rtx (mode);
2171
2172 if (! flag_errno_math || ! HONOR_NANS (mode))
2173 errno_set = false;
2174
2175 if (errno_set && optimize_insn_for_size_p ())
2176 return 0;
2177
2178 /* Always stabilize the argument list. */
2179 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2180 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2181
2182 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2183 op1 = expand_normal (arg1);
2184
2185 start_sequence ();
2186
2187 /* Compute into RESULT.
2188 Set RESULT to wherever the result comes back. */
2189 result = expand_binop (mode, builtin_optab, op0, op1,
2190 result, 0, OPTAB_DIRECT);
2191
2192 /* If we were unable to expand via the builtin, stop the sequence
2193 (without outputting the insns) and call the library function
2194 with the stabilized argument list. */
2195 if (result == 0)
2196 {
2197 end_sequence ();
2198 return expand_call (exp, target, target == const0_rtx);
2199 }
2200
2201 if (errno_set)
2202 expand_errno_check (exp, result);
2203
2204 /* Output the entire sequence. */
2205 insns = get_insns ();
2206 end_sequence ();
2207 emit_insn (insns);
2208
2209 return result;
2210 }
2211
2212 /* Expand a call to the builtin ternary math functions (fma).
2213 Return NULL_RTX if a normal call should be emitted rather than expanding the
2214 function in-line. EXP is the expression that is a call to the builtin
2215 function; if convenient, the result should be placed in TARGET.
2216 SUBTARGET may be used as the target for computing one of EXP's
2217 operands. */
2218
2219 static rtx
2220 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2221 {
2222 optab builtin_optab;
2223 rtx op0, op1, op2, result;
2224 rtx_insn *insns;
2225 tree fndecl = get_callee_fndecl (exp);
2226 tree arg0, arg1, arg2;
2227 machine_mode mode;
2228
2229 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2230 return NULL_RTX;
2231
2232 arg0 = CALL_EXPR_ARG (exp, 0);
2233 arg1 = CALL_EXPR_ARG (exp, 1);
2234 arg2 = CALL_EXPR_ARG (exp, 2);
2235
2236 switch (DECL_FUNCTION_CODE (fndecl))
2237 {
2238 CASE_FLT_FN (BUILT_IN_FMA):
2239 builtin_optab = fma_optab; break;
2240 default:
2241 gcc_unreachable ();
2242 }
2243
2244 /* Make a suitable register to place result in. */
2245 mode = TYPE_MODE (TREE_TYPE (exp));
2246
2247 /* Before working hard, check whether the instruction is available. */
2248 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2249 return NULL_RTX;
2250
2251 result = gen_reg_rtx (mode);
2252
2253 /* Always stabilize the argument list. */
2254 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2255 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2256 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2257
2258 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2259 op1 = expand_normal (arg1);
2260 op2 = expand_normal (arg2);
2261
2262 start_sequence ();
2263
2264 /* Compute into RESULT.
2265 Set RESULT to wherever the result comes back. */
2266 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2267 result, 0);
2268
2269 /* If we were unable to expand via the builtin, stop the sequence
2270 (without outputting the insns) and call the library function
2271 with the stabilized argument list. */
2272 if (result == 0)
2273 {
2274 end_sequence ();
2275 return expand_call (exp, target, target == const0_rtx);
2276 }
2277
2278 /* Output the entire sequence. */
2279 insns = get_insns ();
2280 end_sequence ();
2281 emit_insn (insns);
2282
2283 return result;
2284 }
2285
2286 /* Expand a call to the builtin sin and cos math functions.
2287 Return NULL_RTX if a normal call should be emitted rather than expanding the
2288 function in-line. EXP is the expression that is a call to the builtin
2289 function; if convenient, the result should be placed in TARGET.
2290 SUBTARGET may be used as the target for computing one of EXP's
2291 operands. */
2292
2293 static rtx
2294 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2295 {
2296 optab builtin_optab;
2297 rtx op0;
2298 rtx_insn *insns;
2299 tree fndecl = get_callee_fndecl (exp);
2300 machine_mode mode;
2301 tree arg;
2302
2303 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2304 return NULL_RTX;
2305
2306 arg = CALL_EXPR_ARG (exp, 0);
2307
2308 switch (DECL_FUNCTION_CODE (fndecl))
2309 {
2310 CASE_FLT_FN (BUILT_IN_SIN):
2311 CASE_FLT_FN (BUILT_IN_COS):
2312 builtin_optab = sincos_optab; break;
2313 default:
2314 gcc_unreachable ();
2315 }
2316
2317 /* Make a suitable register to place result in. */
2318 mode = TYPE_MODE (TREE_TYPE (exp));
2319
2320 /* Check if the sincos insn is available; if not, fall back
2321 to the sin or cos insn. */
2322 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2323 switch (DECL_FUNCTION_CODE (fndecl))
2324 {
2325 CASE_FLT_FN (BUILT_IN_SIN):
2326 builtin_optab = sin_optab; break;
2327 CASE_FLT_FN (BUILT_IN_COS):
2328 builtin_optab = cos_optab; break;
2329 default:
2330 gcc_unreachable ();
2331 }
2332
2333 /* Before working hard, check whether the instruction is available. */
2334 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2335 {
2336 rtx result = gen_reg_rtx (mode);
2337
2338 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2339 need to expand the argument again. This way, we will not perform
2340 side-effects more than once. */
2341 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2342
2343 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2344
2345 start_sequence ();
2346
2347 /* Compute into RESULT.
2348 Set RESULT to wherever the result comes back. */
2349 if (builtin_optab == sincos_optab)
2350 {
2351 int ok;
2352
2353 switch (DECL_FUNCTION_CODE (fndecl))
2354 {
2355 CASE_FLT_FN (BUILT_IN_SIN):
2356 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2357 break;
2358 CASE_FLT_FN (BUILT_IN_COS):
2359 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2360 break;
2361 default:
2362 gcc_unreachable ();
2363 }
2364 gcc_assert (ok);
2365 }
2366 else
2367 result = expand_unop (mode, builtin_optab, op0, result, 0);
2368
2369 if (result != 0)
2370 {
2371 /* Output the entire sequence. */
2372 insns = get_insns ();
2373 end_sequence ();
2374 emit_insn (insns);
2375 return result;
2376 }
2377
2378 /* If we were unable to expand via the builtin, stop the sequence
2379 (without outputting the insns) and call the library function
2380 with the stabilized argument list. */
2381 end_sequence ();
2382 }
2383
2384 return expand_call (exp, target, target == const0_rtx);
2385 }
2386
2387 /* Given an interclass math builtin decl FNDECL and its argument ARG,
2388 return an RTL instruction code that implements the functionality.
2389 If that isn't possible or available, return CODE_FOR_nothing. */
2390
2391 static enum insn_code
2392 interclass_mathfn_icode (tree arg, tree fndecl)
2393 {
2394 bool errno_set = false;
2395 optab builtin_optab = unknown_optab;
2396 machine_mode mode;
2397
2398 switch (DECL_FUNCTION_CODE (fndecl))
2399 {
2400 CASE_FLT_FN (BUILT_IN_ILOGB):
2401 errno_set = true; builtin_optab = ilogb_optab; break;
2402 CASE_FLT_FN (BUILT_IN_ISINF):
2403 builtin_optab = isinf_optab; break;
2404 case BUILT_IN_ISNORMAL:
2405 case BUILT_IN_ISFINITE:
2406 CASE_FLT_FN (BUILT_IN_FINITE):
2407 case BUILT_IN_FINITED32:
2408 case BUILT_IN_FINITED64:
2409 case BUILT_IN_FINITED128:
2410 case BUILT_IN_ISINFD32:
2411 case BUILT_IN_ISINFD64:
2412 case BUILT_IN_ISINFD128:
2413 /* These builtins have no optabs (yet). */
2414 break;
2415 default:
2416 gcc_unreachable ();
2417 }
2418
2419 /* There's no easy way to detect the case we need to set EDOM. */
2420 if (flag_errno_math && errno_set)
2421 return CODE_FOR_nothing;
2422
2423 /* Optab mode depends on the mode of the input argument. */
2424 mode = TYPE_MODE (TREE_TYPE (arg));
2425
2426 if (builtin_optab)
2427 return optab_handler (builtin_optab, mode);
2428 return CODE_FOR_nothing;
2429 }
2430
2431 /* Expand a call to one of the builtin math functions that operate on
2432 a floating-point argument and output an integer result (ilogb, isinf,
2433 isnan, etc.).
2434 Return 0 if a normal call should be emitted rather than expanding the
2435 function in-line. EXP is the expression that is a call to the builtin
2436 function; if convenient, the result should be placed in TARGET. */
2437
2438 static rtx
2439 expand_builtin_interclass_mathfn (tree exp, rtx target)
2440 {
2441 enum insn_code icode = CODE_FOR_nothing;
2442 rtx op0;
2443 tree fndecl = get_callee_fndecl (exp);
2444 machine_mode mode;
2445 tree arg;
2446
2447 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2448 return NULL_RTX;
2449
2450 arg = CALL_EXPR_ARG (exp, 0);
2451 icode = interclass_mathfn_icode (arg, fndecl);
2452 mode = TYPE_MODE (TREE_TYPE (arg));
2453
2454 if (icode != CODE_FOR_nothing)
2455 {
2456 struct expand_operand ops[1];
2457 rtx_insn *last = get_last_insn ();
2458 tree orig_arg = arg;
2459
2460 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2461 need to expand the argument again. This way, we will not perform
2462 side-effects more than once. */
2463 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2464
2465 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2466
2467 if (mode != GET_MODE (op0))
2468 op0 = convert_to_mode (mode, op0, 0);
2469
2470 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2471 if (maybe_legitimize_operands (icode, 0, 1, ops)
2472 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2473 return ops[0].value;
2474
2475 delete_insns_since (last);
2476 CALL_EXPR_ARG (exp, 0) = orig_arg;
2477 }
2478
2479 return NULL_RTX;
2480 }
2481
2482 /* Expand a call to the builtin sincos math function.
2483 Return NULL_RTX if a normal call should be emitted rather than expanding the
2484 function in-line. EXP is the expression that is a call to the builtin
2485 function. */
2486
2487 static rtx
2488 expand_builtin_sincos (tree exp)
2489 {
2490 rtx op0, op1, op2, target1, target2;
2491 machine_mode mode;
2492 tree arg, sinp, cosp;
2493 int result;
2494 location_t loc = EXPR_LOCATION (exp);
2495 tree alias_type, alias_off;
2496
2497 if (!validate_arglist (exp, REAL_TYPE,
2498 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2499 return NULL_RTX;
2500
2501 arg = CALL_EXPR_ARG (exp, 0);
2502 sinp = CALL_EXPR_ARG (exp, 1);
2503 cosp = CALL_EXPR_ARG (exp, 2);
2504
2505 /* Make a suitable register to place result in. */
2506 mode = TYPE_MODE (TREE_TYPE (arg));
2507
2508 /* Check if sincos insn is available, otherwise emit the call. */
2509 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2510 return NULL_RTX;
2511
2512 target1 = gen_reg_rtx (mode);
2513 target2 = gen_reg_rtx (mode);
2514
2515 op0 = expand_normal (arg);
2516 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2517 alias_off = build_int_cst (alias_type, 0);
2518 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2519 sinp, alias_off));
2520 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2521 cosp, alias_off));
2522
2523 /* Compute into target1 and target2.
2524 Set TARGET to wherever the result comes back. */
2525 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2526 gcc_assert (result);
2527
2528 /* Move target1 and target2 to the memory locations indicated
2529 by op1 and op2. */
2530 emit_move_insn (op1, target1);
2531 emit_move_insn (op2, target2);
2532
2533 return const0_rtx;
2534 }
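
/* For example, on a target providing a sincos pattern, a call
   sincos (x, &s, &c) is expanded here into a single instruction that
   computes both values, rather than separate sin and cos libcalls.  */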
2535
2536 /* Expand a call to the internal cexpi builtin to the sincos math function.
2537 EXP is the expression that is a call to the builtin function; if convenient,
2538 the result should be placed in TARGET. */
2539
2540 static rtx
2541 expand_builtin_cexpi (tree exp, rtx target)
2542 {
2543 tree fndecl = get_callee_fndecl (exp);
2544 tree arg, type;
2545 machine_mode mode;
2546 rtx op0, op1, op2;
2547 location_t loc = EXPR_LOCATION (exp);
2548
2549 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2550 return NULL_RTX;
2551
2552 arg = CALL_EXPR_ARG (exp, 0);
2553 type = TREE_TYPE (arg);
2554 mode = TYPE_MODE (TREE_TYPE (arg));
2555
2556 /* Try expanding via a sincos optab; fall back to emitting a libcall
2557 to sincos or cexp. We are sure we have one of them because cexpi
2558 is only generated from sincos or cexp, or when either is available. */
2559 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2560 {
2561 op1 = gen_reg_rtx (mode);
2562 op2 = gen_reg_rtx (mode);
2563
2564 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2565
2566 /* Compute into op1 and op2. */
2567 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2568 }
2569 else if (targetm.libc_has_function (function_sincos))
2570 {
2571 tree call, fn = NULL_TREE;
2572 tree top1, top2;
2573 rtx op1a, op2a;
2574
2575 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2576 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2577 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2578 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2579 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2580 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2581 else
2582 gcc_unreachable ();
2583
2584 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2585 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2586 op1a = copy_addr_to_reg (XEXP (op1, 0));
2587 op2a = copy_addr_to_reg (XEXP (op2, 0));
2588 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2589 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2590
2591 /* Make sure not to fold the sincos call again. */
2592 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2593 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2594 call, 3, arg, top1, top2));
2595 }
2596 else
2597 {
2598 tree call, fn = NULL_TREE, narg;
2599 tree ctype = build_complex_type (type);
2600
2601 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2602 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2603 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2604 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2605 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2606 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2607 else
2608 gcc_unreachable ();
2609
2610 /* If we don't have a decl for cexp, create one. This is the
2611 friendliest fallback if the user calls __builtin_cexpi
2612 on a target without full C99 function support. */
2613 if (fn == NULL_TREE)
2614 {
2615 tree fntype;
2616 const char *name = NULL;
2617
2618 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2619 name = "cexpf";
2620 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2621 name = "cexp";
2622 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2623 name = "cexpl";
2624
2625 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2626 fn = build_fn_decl (name, fntype);
2627 }
2628
2629 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2630 build_real (type, dconst0), arg);
2631
2632 /* Make sure not to fold the cexp call again. */
2633 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2634 return expand_expr (build_call_nary (ctype, call, 1, narg),
2635 target, VOIDmode, EXPAND_NORMAL);
2636 }
2637
2638 /* Now build the proper return type. */
2639 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2640 make_tree (TREE_TYPE (arg), op2),
2641 make_tree (TREE_TYPE (arg), op1)),
2642 target, VOIDmode, EXPAND_NORMAL);
2643 }
2644
2645 /* Conveniently construct a function call expression. FNDECL names the
2646 function to be called, N is the number of arguments, and the "..."
2647 parameters are the argument expressions. Unlike build_call_expr,
2648 this doesn't fold the call, so it will always return a CALL_EXPR. */
2649
2650 static tree
2651 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2652 {
2653 va_list ap;
2654 tree fntype = TREE_TYPE (fndecl);
2655 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2656
2657 va_start (ap, n);
2658 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2659 va_end (ap);
2660 SET_EXPR_LOCATION (fn, loc);
2661 return fn;
2662 }
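
/* A typical use (a sketch; FNDECL, DEST, SRC and LEN stand for trees
   supplied by the caller):

     tree call = build_call_nofold_loc (loc, fndecl, 3, dest, src, len);
     expand_expr (call, target, mode, EXPAND_NORMAL);

   This is how the mem* expanders below fall back to a library call
   without re-folding it.  */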
2663
2664 /* Expand a call to one of the builtin rounding functions gcc defines
2665 as an extension (lfloor and lceil). As these are gcc extensions we
2666 do not need to worry about setting errno to EDOM.
2667 If expanding via optab fails, lower expression to (int)(floor(x)).
2668 EXP is the expression that is a call to the builtin function;
2669 if convenient, the result should be placed in TARGET. */
2670
2671 static rtx
2672 expand_builtin_int_roundingfn (tree exp, rtx target)
2673 {
2674 convert_optab builtin_optab;
2675 rtx op0, tmp;
2676 rtx_insn *insns;
2677 tree fndecl = get_callee_fndecl (exp);
2678 enum built_in_function fallback_fn;
2679 tree fallback_fndecl;
2680 machine_mode mode;
2681 tree arg;
2682
2683 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2684 gcc_unreachable ();
2685
2686 arg = CALL_EXPR_ARG (exp, 0);
2687
2688 switch (DECL_FUNCTION_CODE (fndecl))
2689 {
2690 CASE_FLT_FN (BUILT_IN_ICEIL):
2691 CASE_FLT_FN (BUILT_IN_LCEIL):
2692 CASE_FLT_FN (BUILT_IN_LLCEIL):
2693 builtin_optab = lceil_optab;
2694 fallback_fn = BUILT_IN_CEIL;
2695 break;
2696
2697 CASE_FLT_FN (BUILT_IN_IFLOOR):
2698 CASE_FLT_FN (BUILT_IN_LFLOOR):
2699 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2700 builtin_optab = lfloor_optab;
2701 fallback_fn = BUILT_IN_FLOOR;
2702 break;
2703
2704 default:
2705 gcc_unreachable ();
2706 }
2707
2708 /* Make a suitable register to place result in. */
2709 mode = TYPE_MODE (TREE_TYPE (exp));
2710
2711 target = gen_reg_rtx (mode);
2712
2713 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2714 need to expand the argument again. This way, we will not perform
2715 side-effects more than once. */
2716 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2717
2718 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2719
2720 start_sequence ();
2721
2722 /* Compute into TARGET. */
2723 if (expand_sfix_optab (target, op0, builtin_optab))
2724 {
2725 /* Output the entire sequence. */
2726 insns = get_insns ();
2727 end_sequence ();
2728 emit_insn (insns);
2729 return target;
2730 }
2731
2732 /* If we were unable to expand via the builtin, stop the sequence
2733 (without outputting the insns). */
2734 end_sequence ();
2735
2736 /* Fall back to floating point rounding optab. */
2737 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2738
2739 /* For non-C99 targets we may end up without a fallback fndecl here
2740 if the user called __builtin_lfloor directly. In this case emit
2741 a call to the floor/ceil variants nevertheless. This should result
2742 in the best user experience for targets without full C99 support. */
2743 if (fallback_fndecl == NULL_TREE)
2744 {
2745 tree fntype;
2746 const char *name = NULL;
2747
2748 switch (DECL_FUNCTION_CODE (fndecl))
2749 {
2750 case BUILT_IN_ICEIL:
2751 case BUILT_IN_LCEIL:
2752 case BUILT_IN_LLCEIL:
2753 name = "ceil";
2754 break;
2755 case BUILT_IN_ICEILF:
2756 case BUILT_IN_LCEILF:
2757 case BUILT_IN_LLCEILF:
2758 name = "ceilf";
2759 break;
2760 case BUILT_IN_ICEILL:
2761 case BUILT_IN_LCEILL:
2762 case BUILT_IN_LLCEILL:
2763 name = "ceill";
2764 break;
2765 case BUILT_IN_IFLOOR:
2766 case BUILT_IN_LFLOOR:
2767 case BUILT_IN_LLFLOOR:
2768 name = "floor";
2769 break;
2770 case BUILT_IN_IFLOORF:
2771 case BUILT_IN_LFLOORF:
2772 case BUILT_IN_LLFLOORF:
2773 name = "floorf";
2774 break;
2775 case BUILT_IN_IFLOORL:
2776 case BUILT_IN_LFLOORL:
2777 case BUILT_IN_LLFLOORL:
2778 name = "floorl";
2779 break;
2780 default:
2781 gcc_unreachable ();
2782 }
2783
2784 fntype = build_function_type_list (TREE_TYPE (arg),
2785 TREE_TYPE (arg), NULL_TREE);
2786 fallback_fndecl = build_fn_decl (name, fntype);
2787 }
2788
2789 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2790
2791 tmp = expand_normal (exp);
2792 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2793
2794 /* Truncate the result of floating point optab to integer
2795 via expand_fix (). */
2796 target = gen_reg_rtx (mode);
2797 expand_fix (target, tmp, 0);
2798
2799 return target;
2800 }
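
/* For example, when the lfloor optab has no handler for the mode,
   a call __builtin_lfloor (x) is lowered by the code above to roughly
   (long) floor (x): a call to floor followed by expand_fix.  */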
2801
2802 /* Expand a call to one of the builtin math functions doing integer
2803 conversion (lrint, lround, etc.).
2804 Return 0 if a normal call should be emitted rather than expanding the
2805 function in-line. EXP is the expression that is a call to the builtin
2806 function; if convenient, the result should be placed in TARGET. */
2807
2808 static rtx
2809 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2810 {
2811 convert_optab builtin_optab;
2812 rtx op0;
2813 rtx_insn *insns;
2814 tree fndecl = get_callee_fndecl (exp);
2815 tree arg;
2816 machine_mode mode;
2817 enum built_in_function fallback_fn = BUILT_IN_NONE;
2818
2819 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2820 gcc_unreachable ();
2821
2822 arg = CALL_EXPR_ARG (exp, 0);
2823
2824 switch (DECL_FUNCTION_CODE (fndecl))
2825 {
2826 CASE_FLT_FN (BUILT_IN_IRINT):
2827 fallback_fn = BUILT_IN_LRINT;
2828 /* FALLTHRU */
2829 CASE_FLT_FN (BUILT_IN_LRINT):
2830 CASE_FLT_FN (BUILT_IN_LLRINT):
2831 builtin_optab = lrint_optab;
2832 break;
2833
2834 CASE_FLT_FN (BUILT_IN_IROUND):
2835 fallback_fn = BUILT_IN_LROUND;
2836 /* FALLTHRU */
2837 CASE_FLT_FN (BUILT_IN_LROUND):
2838 CASE_FLT_FN (BUILT_IN_LLROUND):
2839 builtin_optab = lround_optab;
2840 break;
2841
2842 default:
2843 gcc_unreachable ();
2844 }
2845
2846 /* There's no easy way to detect the case we need to set EDOM. */
2847 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2848 return NULL_RTX;
2849
2850 /* Make a suitable register to place result in. */
2851 mode = TYPE_MODE (TREE_TYPE (exp));
2852
2853 /* Expand inline only when errno handling is not required. */
2854 if (!flag_errno_math)
2855 {
2856 rtx result = gen_reg_rtx (mode);
2857
2858 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2859 need to expand the argument again. This way, we will not perform
2860 side-effects more than once. */
2861 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2862
2863 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2864
2865 start_sequence ();
2866
2867 if (expand_sfix_optab (result, op0, builtin_optab))
2868 {
2869 /* Output the entire sequence. */
2870 insns = get_insns ();
2871 end_sequence ();
2872 emit_insn (insns);
2873 return result;
2874 }
2875
2876 /* If we were unable to expand via the builtin, stop the sequence
2877 (without outputting the insns) and call the library function
2878 with the stabilized argument list. */
2879 end_sequence ();
2880 }
2881
2882 if (fallback_fn != BUILT_IN_NONE)
2883 {
2884 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2885 targets, (int) round (x) should never be transformed into
2886 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2887 a call to lround in the hope that the target provides at least some
2888 C99 functions. This should result in the best user experience for
2889 targets without full C99 support. */
2890 tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
2891 fallback_fn, 0);
2892
2893 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2894 fallback_fndecl, 1, arg);
2895
2896 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2897 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2898 return convert_to_mode (mode, target, 0);
2899 }
2900
2901 return expand_call (exp, target, target == const0_rtx);
2902 }
2903
2904 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2905 a normal call should be emitted rather than expanding the function
2906 in-line. EXP is the expression that is a call to the builtin
2907 function; if convenient, the result should be placed in TARGET. */
2908
2909 static rtx
2910 expand_builtin_powi (tree exp, rtx target)
2911 {
2912 tree arg0, arg1;
2913 rtx op0, op1;
2914 machine_mode mode;
2915 machine_mode mode2;
2916
2917 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2918 return NULL_RTX;
2919
2920 arg0 = CALL_EXPR_ARG (exp, 0);
2921 arg1 = CALL_EXPR_ARG (exp, 1);
2922 mode = TYPE_MODE (TREE_TYPE (exp));
2923
2924 /* Emit a libcall to libgcc. */
2925
2926 /* Mode of the 2nd argument must match that of an int. */
2927 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2928
2929 if (target == NULL_RTX)
2930 target = gen_reg_rtx (mode);
2931
2932 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2933 if (GET_MODE (op0) != mode)
2934 op0 = convert_to_mode (mode, op0, 0);
2935 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2936 if (GET_MODE (op1) != mode2)
2937 op1 = convert_to_mode (mode2, op1, 0);
2938
2939 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2940 target, LCT_CONST, mode, 2,
2941 op0, mode, op1, mode2);
2942
2943 return target;
2944 }
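
/* For example, __builtin_powi (x, 5) with double x becomes a call to
   libgcc's __powidf2 (x, 5); unlike pow, the exponent is a plain int
   and the libcall is LCT_CONST, so it can be CSEd.  */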
2945
2946 /* Expand expression EXP which is a call to the strlen builtin. Return
2947 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
2948 try to get the result in TARGET, if convenient. */
2949
2950 static rtx
2951 expand_builtin_strlen (tree exp, rtx target,
2952 machine_mode target_mode)
2953 {
2954 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2955 return NULL_RTX;
2956 else
2957 {
2958 struct expand_operand ops[4];
2959 rtx pat;
2960 tree len;
2961 tree src = CALL_EXPR_ARG (exp, 0);
2962 rtx src_reg;
2963 rtx_insn *before_strlen;
2964 machine_mode insn_mode = target_mode;
2965 enum insn_code icode = CODE_FOR_nothing;
2966 unsigned int align;
2967
2968 /* If the length can be computed at compile-time, return it. */
2969 len = c_strlen (src, 0);
2970 if (len)
2971 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2972
2973 /* If the length can be computed at compile-time and is a constant
2974 integer, but there are side-effects in src, evaluate
2975 src for side-effects, then return len.
2976 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2977 can be optimized into: i++; x = 3; */
2978 len = c_strlen (src, 1);
2979 if (len && TREE_CODE (len) == INTEGER_CST)
2980 {
2981 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2982 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2983 }
2984
2985 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2986
2987 /* If SRC is not a pointer type, don't do this operation inline. */
2988 if (align == 0)
2989 return NULL_RTX;
2990
2991 /* Bail out if we can't compute strlen in the right mode. */
2992 while (insn_mode != VOIDmode)
2993 {
2994 icode = optab_handler (strlen_optab, insn_mode);
2995 if (icode != CODE_FOR_nothing)
2996 break;
2997
2998 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
2999 }
3000 if (insn_mode == VOIDmode)
3001 return NULL_RTX;
3002
3003 /* Make a place to hold the source address. We will not expand
3004 the actual source until we are sure that the expansion will
3005 not fail -- there are trees that cannot be expanded twice. */
3006 src_reg = gen_reg_rtx (Pmode);
3007
3008 /* Mark the beginning of the strlen sequence so we can emit the
3009 source operand later. */
3010 before_strlen = get_last_insn ();
3011
3012 create_output_operand (&ops[0], target, insn_mode);
3013 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3014 create_integer_operand (&ops[2], 0);
3015 create_integer_operand (&ops[3], align);
3016 if (!maybe_expand_insn (icode, 4, ops))
3017 return NULL_RTX;
3018
3019 /* Now that we are assured of success, expand the source. */
3020 start_sequence ();
3021 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3022 if (pat != src_reg)
3023 {
3024 #ifdef POINTERS_EXTEND_UNSIGNED
3025 if (GET_MODE (pat) != Pmode)
3026 pat = convert_to_mode (Pmode, pat,
3027 POINTERS_EXTEND_UNSIGNED);
3028 #endif
3029 emit_move_insn (src_reg, pat);
3030 }
3031 pat = get_insns ();
3032 end_sequence ();
3033
3034 if (before_strlen)
3035 emit_insn_after (pat, before_strlen);
3036 else
3037 emit_insn_before (pat, get_insns ());
3038
3039 /* Return the value in the proper mode for this function. */
3040 if (GET_MODE (ops[0].value) == target_mode)
3041 target = ops[0].value;
3042 else if (target != 0)
3043 convert_move (target, ops[0].value, 0);
3044 else
3045 target = convert_to_mode (target_mode, ops[0].value, 0);
3046
3047 return target;
3048 }
3049 }
3050
3051 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3052 bytes from constant string DATA + OFFSET and return it as target
3053 constant. */
3054
3055 static rtx
3056 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3057 machine_mode mode)
3058 {
3059 const char *str = (const char *) data;
3060
3061 gcc_assert (offset >= 0
3062 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3063 <= strlen (str) + 1));
3064
3065 return c_readstr (str + offset, mode);
3066 }
3067
3068 /* LEN specifies the length of the block for a memcpy/memset operation.
3069 Figure out its range and store it in MIN_SIZE/MAX_SIZE.
3070 In some cases we can make a very likely guess at the maximum size,
3071 which we then store in PROBABLE_MAX_SIZE. */
3072
3073 static void
3074 determine_block_size (tree len, rtx len_rtx,
3075 unsigned HOST_WIDE_INT *min_size,
3076 unsigned HOST_WIDE_INT *max_size,
3077 unsigned HOST_WIDE_INT *probable_max_size)
3078 {
3079 if (CONST_INT_P (len_rtx))
3080 {
3081 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3082 return;
3083 }
3084 else
3085 {
3086 wide_int min, max;
3087 enum value_range_type range_type = VR_UNDEFINED;
3088
3089 /* Determine bounds from the type. */
3090 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3091 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3092 else
3093 *min_size = 0;
3094 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3095 *probable_max_size = *max_size
3096 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3097 else
3098 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3099
3100 if (TREE_CODE (len) == SSA_NAME)
3101 range_type = get_range_info (len, &min, &max);
3102 if (range_type == VR_RANGE)
3103 {
3104 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3105 *min_size = min.to_uhwi ();
3106 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3107 *probable_max_size = *max_size = max.to_uhwi ();
3108 }
3109 else if (range_type == VR_ANTI_RANGE)
3110 {
3111 /* An anti-range 0...N lets us determine that the minimal size is N+1. */
3112 if (min == 0)
3113 {
3114 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3115 *min_size = max.to_uhwi () + 1;
3116 }
3117 /* Code like
3118
3119 int n;
3120 if (n < 100)
3121 memcpy (a, b, n)
3122
3123 produces an anti-range allowing negative values of N. We can
3124 still use this information to guess that N is not negative.
3125 */
3126 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3127 *probable_max_size = min.to_uhwi () - 1;
3128 }
3129 }
3130 gcc_checking_assert (*max_size <=
3131 (unsigned HOST_WIDE_INT)
3132 GET_MODE_MASK (GET_MODE (len_rtx)));
3133 }
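
/* For instance, given

     void f (unsigned n) { if (n >= 8 && n <= 32) memcpy (a, b, n); }

   value-range information gives LEN the range [8, 32], so the code
   above sets *MIN_SIZE to 8 and both *MAX_SIZE and *PROBABLE_MAX_SIZE
   to 32 (a sketch; f, a and b are placeholders).  */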
3134
3135 /* Helper function to do the actual work for expand_builtin_memcpy. */
3136
3137 static rtx
3138 expand_builtin_memcpy_args (tree dest, tree src, tree len, rtx target, tree exp)
3139 {
3140 const char *src_str;
3141 unsigned int src_align = get_pointer_alignment (src);
3142 unsigned int dest_align = get_pointer_alignment (dest);
3143 rtx dest_mem, src_mem, dest_addr, len_rtx;
3144 HOST_WIDE_INT expected_size = -1;
3145 unsigned int expected_align = 0;
3146 unsigned HOST_WIDE_INT min_size;
3147 unsigned HOST_WIDE_INT max_size;
3148 unsigned HOST_WIDE_INT probable_max_size;
3149
3150 /* If DEST is not a pointer type, call the normal function. */
3151 if (dest_align == 0)
3152 return NULL_RTX;
3153
3154 /* If SRC is not a pointer type, don't do this
3155 operation in-line. */
3156 if (src_align == 0)
3157 return NULL_RTX;
3158
3159 if (currently_expanding_gimple_stmt)
3160 stringop_block_profile (currently_expanding_gimple_stmt,
3161 &expected_align, &expected_size);
3162
3163 if (expected_align < dest_align)
3164 expected_align = dest_align;
3165 dest_mem = get_memory_rtx (dest, len);
3166 set_mem_align (dest_mem, dest_align);
3167 len_rtx = expand_normal (len);
3168 determine_block_size (len, len_rtx, &min_size, &max_size,
3169 &probable_max_size);
3170 src_str = c_getstr (src);
3171
3172 /* If SRC is a string constant and block move would be done
3173 by pieces, we can avoid loading the string from memory
3174 and only store the computed constants. */
3175 if (src_str
3176 && CONST_INT_P (len_rtx)
3177 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3178 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3179 CONST_CAST (char *, src_str),
3180 dest_align, false))
3181 {
3182 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3183 builtin_memcpy_read_str,
3184 CONST_CAST (char *, src_str),
3185 dest_align, false, 0);
3186 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3187 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3188 return dest_mem;
3189 }
3190
3191 src_mem = get_memory_rtx (src, len);
3192 set_mem_align (src_mem, src_align);
3193
3194 /* Copy word part most expediently. */
3195 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3196 CALL_EXPR_TAILCALL (exp)
3197 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3198 expected_align, expected_size,
3199 min_size, max_size, probable_max_size);
3200
3201 if (dest_addr == 0)
3202 {
3203 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3204 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3205 }
3206
3207 return dest_addr;
3208 }
3209
3210 /* Expand a call EXP to the memcpy builtin.
3211 Return NULL_RTX if we failed; the caller should emit a normal call,
3212 otherwise try to get the result in TARGET, if convenient (and in
3213 mode MODE if that's convenient). */
3214
3215 static rtx
3216 expand_builtin_memcpy (tree exp, rtx target)
3217 {
3218 if (!validate_arglist (exp,
3219 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3220 return NULL_RTX;
3221 else
3222 {
3223 tree dest = CALL_EXPR_ARG (exp, 0);
3224 tree src = CALL_EXPR_ARG (exp, 1);
3225 tree len = CALL_EXPR_ARG (exp, 2);
3226 return expand_builtin_memcpy_args (dest, src, len, target, exp);
3227 }
3228 }
3229
3230 /* Expand an instrumented call EXP to the memcpy builtin.
3231 Return NULL_RTX if we failed; the caller should emit a normal call,
3232 otherwise try to get the result in TARGET, if convenient (and in
3233 mode MODE if that's convenient). */
3234
3235 static rtx
3236 expand_builtin_memcpy_with_bounds (tree exp, rtx target)
3237 {
3238 if (!validate_arglist (exp,
3239 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3240 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3241 INTEGER_TYPE, VOID_TYPE))
3242 return NULL_RTX;
3243 else
3244 {
3245 tree dest = CALL_EXPR_ARG (exp, 0);
3246 tree src = CALL_EXPR_ARG (exp, 2);
3247 tree len = CALL_EXPR_ARG (exp, 4);
3248 rtx res = expand_builtin_memcpy_args (dest, src, len, target, exp);
3249
3250 /* Return src bounds with the result. */
3251 if (res)
3252 {
3253 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3254 expand_normal (CALL_EXPR_ARG (exp, 1)));
3255 res = chkp_join_splitted_slot (res, bnd);
3256 }
3257 return res;
3258 }
3259 }
3260
3261 /* Expand a call EXP to the mempcpy builtin.
3262 Return NULL_RTX if we failed; the caller should emit a normal call,
3263 otherwise try to get the result in TARGET, if convenient (and in
3264 mode MODE if that's convenient). If ENDP is 0 return the
3265 destination pointer, if ENDP is 1 return the end pointer ala
3266 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3267 stpcpy. */
3268
3269 static rtx
3270 expand_builtin_mempcpy (tree exp, rtx target, machine_mode mode)
3271 {
3272 if (!validate_arglist (exp,
3273 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3274 return NULL_RTX;
3275 else
3276 {
3277 tree dest = CALL_EXPR_ARG (exp, 0);
3278 tree src = CALL_EXPR_ARG (exp, 1);
3279 tree len = CALL_EXPR_ARG (exp, 2);
3280 return expand_builtin_mempcpy_args (dest, src, len,
3281 target, mode, /*endp=*/ 1,
3282 exp);
3283 }
3284 }
3285
3286 /* Expand an instrumented call EXP to the mempcpy builtin.
3287 Return NULL_RTX if we failed; the caller should emit a normal call,
3288 otherwise try to get the result in TARGET, if convenient (and in
3289 mode MODE if that's convenient). */
3290
3291 static rtx
3292 expand_builtin_mempcpy_with_bounds (tree exp, rtx target, machine_mode mode)
3293 {
3294 if (!validate_arglist (exp,
3295 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3296 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3297 INTEGER_TYPE, VOID_TYPE))
3298 return NULL_RTX;
3299 else
3300 {
3301 tree dest = CALL_EXPR_ARG (exp, 0);
3302 tree src = CALL_EXPR_ARG (exp, 2);
3303 tree len = CALL_EXPR_ARG (exp, 4);
3304 rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
3305 mode, 1, exp);
3306
3307 /* Return src bounds with the result. */
3308 if (res)
3309 {
3310 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3311 expand_normal (CALL_EXPR_ARG (exp, 1)));
3312 res = chkp_join_splitted_slot (res, bnd);
3313 }
3314 return res;
3315 }
3316 }
3317
3318 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3319 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3320 so that this can also be called without constructing an actual CALL_EXPR.
3321 The other arguments and return value are the same as for
3322 expand_builtin_mempcpy. */
3323
3324 static rtx
3325 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3326 rtx target, machine_mode mode, int endp,
3327 tree orig_exp)
3328 {
3329 tree fndecl = get_callee_fndecl (orig_exp);
3330
3331 /* If return value is ignored, transform mempcpy into memcpy. */
3332 if (target == const0_rtx
3333 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
3334 && builtin_decl_implicit_p (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP))
3335 {
3336 tree fn = builtin_decl_implicit (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP);
3337 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3338 dest, src, len);
3339 return expand_expr (result, target, mode, EXPAND_NORMAL);
3340 }
3341 else if (target == const0_rtx
3342 && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3343 {
3344 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3345 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3346 dest, src, len);
3347 return expand_expr (result, target, mode, EXPAND_NORMAL);
3348 }
3349 else
3350 {
3351 const char *src_str;
3352 unsigned int src_align = get_pointer_alignment (src);
3353 unsigned int dest_align = get_pointer_alignment (dest);
3354 rtx dest_mem, src_mem, len_rtx;
3355
3356 /* If either SRC or DEST is not a pointer type, don't do this
3357 operation in-line. */
3358 if (dest_align == 0 || src_align == 0)
3359 return NULL_RTX;
3360
3361 /* If LEN is not constant, call the normal function. */
3362 if (! tree_fits_uhwi_p (len))
3363 return NULL_RTX;
3364
3365 len_rtx = expand_normal (len);
3366 src_str = c_getstr (src);
3367
3368 /* If SRC is a string constant and block move would be done
3369 by pieces, we can avoid loading the string from memory
3370    and only store the computed constants.  */
3371 if (src_str
3372 && CONST_INT_P (len_rtx)
3373 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3374 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3375 CONST_CAST (char *, src_str),
3376 dest_align, false))
3377 {
3378 dest_mem = get_memory_rtx (dest, len);
3379 set_mem_align (dest_mem, dest_align);
3380 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3381 builtin_memcpy_read_str,
3382 CONST_CAST (char *, src_str),
3383 dest_align, false, endp);
3384 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3385 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3386 return dest_mem;
3387 }
3388
3389 if (CONST_INT_P (len_rtx)
3390 && can_move_by_pieces (INTVAL (len_rtx),
3391 MIN (dest_align, src_align)))
3392 {
3393 dest_mem = get_memory_rtx (dest, len);
3394 set_mem_align (dest_mem, dest_align);
3395 src_mem = get_memory_rtx (src, len);
3396 set_mem_align (src_mem, src_align);
3397 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3398 MIN (dest_align, src_align), endp);
3399 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3400 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3401 return dest_mem;
3402 }
3403
3404 return NULL_RTX;
3405 }
3406 }
3407
3408 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3409    we failed; the caller should emit a normal call.  Otherwise try to
3410    get the result in TARGET, if convenient.  If ENDP is 0 return the
3411 destination pointer, if ENDP is 1 return the end pointer ala
3412 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3413 stpcpy. */
3414
3415 static rtx
3416 expand_movstr (tree dest, tree src, rtx target, int endp)
3417 {
3418 struct expand_operand ops[3];
3419 rtx dest_mem;
3420 rtx src_mem;
3421
3422 if (!targetm.have_movstr ())
3423 return NULL_RTX;
3424
3425 dest_mem = get_memory_rtx (dest, NULL);
3426 src_mem = get_memory_rtx (src, NULL);
3427 if (!endp)
3428 {
3429 target = force_reg (Pmode, XEXP (dest_mem, 0));
3430 dest_mem = replace_equiv_address (dest_mem, target);
3431 }
3432
3433 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3434 create_fixed_operand (&ops[1], dest_mem);
3435 create_fixed_operand (&ops[2], src_mem);
3436 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3437 return NULL_RTX;
3438
3439 if (endp && target != const0_rtx)
3440 {
3441 target = ops[0].value;
3442 /* movstr is supposed to set end to the address of the NUL
3443 terminator. If the caller requested a mempcpy-like return value,
3444 adjust it. */
3445 if (endp == 1)
3446 {
3447 rtx tem = plus_constant (GET_MODE (target),
3448 gen_lowpart (GET_MODE (target), target), 1);
3449 emit_move_insn (target, force_operand (tem, NULL_RTX));
3450 }
3451 }
3452 return target;
3453 }
3454
3455 /* Expand expression EXP, which is a call to the strcpy builtin.  Return
3456    NULL_RTX if we failed; the caller should emit a normal call.
3457    Otherwise try to get the result in TARGET, if convenient.  */
3459
3460 static rtx
3461 expand_builtin_strcpy (tree exp, rtx target)
3462 {
3463 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3464 {
3465 tree dest = CALL_EXPR_ARG (exp, 0);
3466 tree src = CALL_EXPR_ARG (exp, 1);
3467 return expand_builtin_strcpy_args (dest, src, target);
3468 }
3469 return NULL_RTX;
3470 }
3471
3472 /* Helper function to do the actual work for expand_builtin_strcpy. The
3473 arguments to the builtin_strcpy call DEST and SRC are broken out
3474 so that this can also be called without constructing an actual CALL_EXPR.
3475 The other arguments and return value are the same as for
3476 expand_builtin_strcpy. */
3477
3478 static rtx
3479 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3480 {
3481 return expand_movstr (dest, src, target, /*endp=*/0);
3482 }
3483
3484 /* Expand a call EXP to the stpcpy builtin.
3485    Return NULL_RTX if we failed; the caller should emit a normal call.
3486    Otherwise try to get the result in TARGET, if convenient (and in
3487 mode MODE if that's convenient). */
3488
3489 static rtx
3490 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3491 {
3492 tree dst, src;
3493 location_t loc = EXPR_LOCATION (exp);
3494
3495 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3496 return NULL_RTX;
3497
3498 dst = CALL_EXPR_ARG (exp, 0);
3499 src = CALL_EXPR_ARG (exp, 1);
3500
3501 /* If return value is ignored, transform stpcpy into strcpy. */
3502 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3503 {
3504 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3505 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3506 return expand_expr (result, target, mode, EXPAND_NORMAL);
3507 }
3508 else
3509 {
3510 tree len, lenp1;
3511 rtx ret;
3512
3513 /* Ensure we get an actual string whose length can be evaluated at
3514 compile-time, not an expression containing a string. This is
3515 because the latter will potentially produce pessimized code
3516 when used to produce the return value. */
3517 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3518 return expand_movstr (dst, src, target, /*endp=*/2);
3519
3520 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3521 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3522 target, mode, /*endp=*/2,
3523 exp);
3524
3525 if (ret)
3526 return ret;
3527
3528 if (TREE_CODE (len) == INTEGER_CST)
3529 {
3530 rtx len_rtx = expand_normal (len);
3531
3532 if (CONST_INT_P (len_rtx))
3533 {
3534 ret = expand_builtin_strcpy_args (dst, src, target);
3535
3536 if (ret)
3537 {
3538 if (! target)
3539 {
3540 if (mode != VOIDmode)
3541 target = gen_reg_rtx (mode);
3542 else
3543 target = gen_reg_rtx (GET_MODE (ret));
3544 }
3545 if (GET_MODE (target) != GET_MODE (ret))
3546 ret = gen_lowpart (GET_MODE (target), ret);
3547
3548 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3549 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3550 gcc_assert (ret);
3551
3552 return target;
3553 }
3554 }
3555 }
3556
3557 return expand_movstr (dst, src, target, /*endp=*/2);
3558 }
3559 }
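/* E.g. for stpcpy (d, "abc") the code above rewrites the copy as
   mempcpy (d, "abc", 4) with ENDP == 2, so the value returned is
   d + 3, the address of the copied NUL.  */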
3560
3561 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3562 bytes from constant string DATA + OFFSET and return it as target
3563 constant. */
3564
3565 rtx
3566 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3567 machine_mode mode)
3568 {
3569 const char *str = (const char *) data;
3570
3571 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3572 return const0_rtx;
3573
3574 return c_readstr (str + offset, mode);
3575 }
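/* Illustrative sketch of the behavior (assuming c_readstr's usual
   zero padding past the string end): with DATA pointing at "ab" and a
   4-byte MODE, OFFSET 0 yields the constant for "ab\0\0", and any
   OFFSET beyond the terminating NUL returns const0_rtx.  This is what
   produces strncpy's trailing zero padding.  */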
3576
3577 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3578    NULL_RTX if we failed; the caller should emit a normal call.  */
3579
3580 static rtx
3581 expand_builtin_strncpy (tree exp, rtx target)
3582 {
3583 location_t loc = EXPR_LOCATION (exp);
3584
3585 if (validate_arglist (exp,
3586 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3587 {
3588 tree dest = CALL_EXPR_ARG (exp, 0);
3589 tree src = CALL_EXPR_ARG (exp, 1);
3590 tree len = CALL_EXPR_ARG (exp, 2);
3591 tree slen = c_strlen (src, 1);
3592
3593       /* We must be passed a constant LEN and a SRC with a known constant length.  */
3594 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3595 return NULL_RTX;
3596
3597 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3598
3599 /* We're required to pad with trailing zeros if the requested
3600 len is greater than strlen(s2)+1. In that case try to
3601          use store_by_pieces; if it fails, punt.  */
3602 if (tree_int_cst_lt (slen, len))
3603 {
3604 unsigned int dest_align = get_pointer_alignment (dest);
3605 const char *p = c_getstr (src);
3606 rtx dest_mem;
3607
3608 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3609 || !can_store_by_pieces (tree_to_uhwi (len),
3610 builtin_strncpy_read_str,
3611 CONST_CAST (char *, p),
3612 dest_align, false))
3613 return NULL_RTX;
3614
3615 dest_mem = get_memory_rtx (dest, len);
3616 store_by_pieces (dest_mem, tree_to_uhwi (len),
3617 builtin_strncpy_read_str,
3618 CONST_CAST (char *, p), dest_align, false, 0);
3619 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3620 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3621 return dest_mem;
3622 }
3623 }
3624 return NULL_RTX;
3625 }
3626
3627 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3628 bytes from constant string DATA + OFFSET and return it as target
3629 constant. */
3630
3631 rtx
3632 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3633 machine_mode mode)
3634 {
3635 const char *c = (const char *) data;
3636 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3637
3638 memset (p, *c, GET_MODE_SIZE (mode));
3639
3640 return c_readstr (p, mode);
3641 }
3642
3643 /* Callback routine for store_by_pieces. Return the RTL of a register
3644 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3645    char value given in the RTL register DATA.  For example, if MODE is
3646 4 bytes wide, return the RTL for 0x01010101*data. */
3647
3648 static rtx
3649 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3650 machine_mode mode)
3651 {
3652 rtx target, coeff;
3653 size_t size;
3654 char *p;
3655
3656 size = GET_MODE_SIZE (mode);
3657 if (size == 1)
3658 return (rtx) data;
3659
3660 p = XALLOCAVEC (char, size);
3661 memset (p, 1, size);
3662 coeff = c_readstr (p, mode);
3663
3664 target = convert_to_mode (mode, (rtx) data, 1);
3665 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3666 return force_reg (mode, target);
3667 }
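/* A minimal C sketch of the replication trick above, assuming 8-bit
   bytes and a 4-byte mode:

     unsigned char c = 0xab;
     unsigned int pattern = c * 0x01010101u;    here 0xabababab

   expand_mult emits the equivalent multiplication at the RTL level.  */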
3668
3669 /* Expand expression EXP, which is a call to the memset builtin. Return
3670    NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
3671    try to get the result in TARGET, if convenient (and in mode MODE if that's
3672 convenient). */
3673
3674 static rtx
3675 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
3676 {
3677 if (!validate_arglist (exp,
3678 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3679 return NULL_RTX;
3680 else
3681 {
3682 tree dest = CALL_EXPR_ARG (exp, 0);
3683 tree val = CALL_EXPR_ARG (exp, 1);
3684 tree len = CALL_EXPR_ARG (exp, 2);
3685 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3686 }
3687 }
3688
3689 /* Expand expression EXP, which is an instrumented call to the memset builtin.
3690    Return NULL_RTX if we failed; the caller should emit a normal call.
3691    Otherwise try to get the result in TARGET, if convenient (and in mode MODE if that's
3692 convenient). */
3693
3694 static rtx
3695 expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
3696 {
3697 if (!validate_arglist (exp,
3698 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3699 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3700 return NULL_RTX;
3701 else
3702 {
3703 tree dest = CALL_EXPR_ARG (exp, 0);
3704 tree val = CALL_EXPR_ARG (exp, 2);
3705 tree len = CALL_EXPR_ARG (exp, 3);
3706 rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
3707
3708 /* Return src bounds with the result. */
3709 if (res)
3710 {
3711 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3712 expand_normal (CALL_EXPR_ARG (exp, 1)));
3713 res = chkp_join_splitted_slot (res, bnd);
3714 }
3715 return res;
3716 }
3717 }
3718
3719 /* Helper function to do the actual work for expand_builtin_memset. The
3720 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3721 so that this can also be called without constructing an actual CALL_EXPR.
3722 The other arguments and return value are the same as for
3723 expand_builtin_memset. */
3724
3725 static rtx
3726 expand_builtin_memset_args (tree dest, tree val, tree len,
3727 rtx target, machine_mode mode, tree orig_exp)
3728 {
3729 tree fndecl, fn;
3730 enum built_in_function fcode;
3731 machine_mode val_mode;
3732 char c;
3733 unsigned int dest_align;
3734 rtx dest_mem, dest_addr, len_rtx;
3735 HOST_WIDE_INT expected_size = -1;
3736 unsigned int expected_align = 0;
3737 unsigned HOST_WIDE_INT min_size;
3738 unsigned HOST_WIDE_INT max_size;
3739 unsigned HOST_WIDE_INT probable_max_size;
3740
3741 dest_align = get_pointer_alignment (dest);
3742
3743 /* If DEST is not a pointer type, don't do this operation in-line. */
3744 if (dest_align == 0)
3745 return NULL_RTX;
3746
3747 if (currently_expanding_gimple_stmt)
3748 stringop_block_profile (currently_expanding_gimple_stmt,
3749 &expected_align, &expected_size);
3750
3751 if (expected_align < dest_align)
3752 expected_align = dest_align;
3753
3754 /* If the LEN parameter is zero, return DEST. */
3755 if (integer_zerop (len))
3756 {
3757 /* Evaluate and ignore VAL in case it has side-effects. */
3758 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3759 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3760 }
3761
3762 /* Stabilize the arguments in case we fail. */
3763 dest = builtin_save_expr (dest);
3764 val = builtin_save_expr (val);
3765 len = builtin_save_expr (len);
3766
3767 len_rtx = expand_normal (len);
3768 determine_block_size (len, len_rtx, &min_size, &max_size,
3769 &probable_max_size);
3770 dest_mem = get_memory_rtx (dest, len);
3771 val_mode = TYPE_MODE (unsigned_char_type_node);
3772
3773 if (TREE_CODE (val) != INTEGER_CST)
3774 {
3775 rtx val_rtx;
3776
3777 val_rtx = expand_normal (val);
3778 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3779
3780 /* Assume that we can memset by pieces if we can store
3781         the coefficients by pieces (in the required modes).
3782         We can't pass builtin_memset_gen_str as that emits RTL.  */
3783 c = 1;
3784 if (tree_fits_uhwi_p (len)
3785 && can_store_by_pieces (tree_to_uhwi (len),
3786 builtin_memset_read_str, &c, dest_align,
3787 true))
3788 {
3789 val_rtx = force_reg (val_mode, val_rtx);
3790 store_by_pieces (dest_mem, tree_to_uhwi (len),
3791 builtin_memset_gen_str, val_rtx, dest_align,
3792 true, 0);
3793 }
3794 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3795 dest_align, expected_align,
3796 expected_size, min_size, max_size,
3797 probable_max_size))
3798 goto do_libcall;
3799
3800 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3801 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3802 return dest_mem;
3803 }
3804
3805 if (target_char_cast (val, &c))
3806 goto do_libcall;
3807
3808 if (c)
3809 {
3810 if (tree_fits_uhwi_p (len)
3811 && can_store_by_pieces (tree_to_uhwi (len),
3812 builtin_memset_read_str, &c, dest_align,
3813 true))
3814 store_by_pieces (dest_mem, tree_to_uhwi (len),
3815 builtin_memset_read_str, &c, dest_align, true, 0);
3816 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3817 gen_int_mode (c, val_mode),
3818 dest_align, expected_align,
3819 expected_size, min_size, max_size,
3820 probable_max_size))
3821 goto do_libcall;
3822
3823 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3824 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3825 return dest_mem;
3826 }
3827
3828 set_mem_align (dest_mem, dest_align);
3829 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3830 CALL_EXPR_TAILCALL (orig_exp)
3831 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3832 expected_align, expected_size,
3833 min_size, max_size,
3834 probable_max_size);
3835
3836 if (dest_addr == 0)
3837 {
3838 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3839 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3840 }
3841
3842 return dest_addr;
3843
3844 do_libcall:
3845 fndecl = get_callee_fndecl (orig_exp);
3846 fcode = DECL_FUNCTION_CODE (fndecl);
3847 if (fcode == BUILT_IN_MEMSET
3848 || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
3849 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3850 dest, val, len);
3851 else if (fcode == BUILT_IN_BZERO)
3852 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3853 dest, len);
3854 else
3855 gcc_unreachable ();
3856 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3857 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3858 return expand_call (fn, target, target == const0_rtx);
3859 }
3860
3861 /* Expand expression EXP, which is a call to the bzero builtin. Return
3862    NULL_RTX if we failed; the caller should emit a normal call.  */
3863
3864 static rtx
3865 expand_builtin_bzero (tree exp)
3866 {
3867 tree dest, size;
3868 location_t loc = EXPR_LOCATION (exp);
3869
3870 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3871 return NULL_RTX;
3872
3873 dest = CALL_EXPR_ARG (exp, 0);
3874 size = CALL_EXPR_ARG (exp, 1);
3875
3876 /* New argument list transforming bzero(ptr x, int y) to
3877 memset(ptr x, int 0, size_t y). This is done this way
3878      so that if it isn't expanded inline, we fall back to
3879 calling bzero instead of memset. */
3880
3881 return expand_builtin_memset_args (dest, integer_zero_node,
3882 fold_convert_loc (loc,
3883 size_type_node, size),
3884 const0_rtx, VOIDmode, exp);
3885 }
3886
3887 /* Try to expand cmpstr operation ICODE with the given operands.
3888 Return the result rtx on success, otherwise return null. */
3889
3890 static rtx
3891 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
3892 HOST_WIDE_INT align)
3893 {
3894 machine_mode insn_mode = insn_data[icode].operand[0].mode;
3895
3896 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
3897 target = NULL_RTX;
3898
3899 struct expand_operand ops[4];
3900 create_output_operand (&ops[0], target, insn_mode);
3901 create_fixed_operand (&ops[1], arg1_rtx);
3902 create_fixed_operand (&ops[2], arg2_rtx);
3903 create_integer_operand (&ops[3], align);
3904 if (maybe_expand_insn (icode, 4, ops))
3905 return ops[0].value;
3906 return NULL_RTX;
3907 }
3908
3909 /* Try to expand cmpstrn or cmpmem operation ICODE with the given operands.
3910 ARG3_TYPE is the type of ARG3_RTX. Return the result rtx on success,
3911 otherwise return null. */
3912
3913 static rtx
3914 expand_cmpstrn_or_cmpmem (insn_code icode, rtx target, rtx arg1_rtx,
3915 rtx arg2_rtx, tree arg3_type, rtx arg3_rtx,
3916 HOST_WIDE_INT align)
3917 {
3918 machine_mode insn_mode = insn_data[icode].operand[0].mode;
3919
3920 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
3921 target = NULL_RTX;
3922
3923 struct expand_operand ops[5];
3924 create_output_operand (&ops[0], target, insn_mode);
3925 create_fixed_operand (&ops[1], arg1_rtx);
3926 create_fixed_operand (&ops[2], arg2_rtx);
3927 create_convert_operand_from (&ops[3], arg3_rtx, TYPE_MODE (arg3_type),
3928 TYPE_UNSIGNED (arg3_type));
3929 create_integer_operand (&ops[4], align);
3930 if (maybe_expand_insn (icode, 5, ops))
3931 return ops[0].value;
3932 return NULL_RTX;
3933 }
3934
3935 /* Expand expression EXP, which is a call to the memcmp built-in function.
3936    Return NULL_RTX if we failed; the caller should then emit a normal call.
3937    Otherwise try to get the result in TARGET, if convenient.  */
3938
3939 static rtx
3940 expand_builtin_memcmp (tree exp, rtx target)
3941 {
3942 if (!validate_arglist (exp,
3943 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3944 return NULL_RTX;
3945
3946 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3947 implementing memcmp because it will stop if it encounters two
3948 zero bytes. */
3949 insn_code icode = direct_optab_handler (cmpmem_optab, SImode);
3950 if (icode == CODE_FOR_nothing)
3951 return NULL_RTX;
3952
3953 tree arg1 = CALL_EXPR_ARG (exp, 0);
3954 tree arg2 = CALL_EXPR_ARG (exp, 1);
3955 tree len = CALL_EXPR_ARG (exp, 2);
3956
3957 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3958 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3959
3960   /* If we don't know the alignment of either argument, call the function.  */
3961 if (arg1_align == 0 || arg2_align == 0)
3962 return NULL_RTX;
3963
3964 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
3965 location_t loc = EXPR_LOCATION (exp);
3966 rtx arg1_rtx = get_memory_rtx (arg1, len);
3967 rtx arg2_rtx = get_memory_rtx (arg2, len);
3968 rtx arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3969
3970 /* Set MEM_SIZE as appropriate. */
3971 if (CONST_INT_P (arg3_rtx))
3972 {
3973 set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
3974 set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
3975 }
3976
3977 rtx result = expand_cmpstrn_or_cmpmem (icode, target, arg1_rtx, arg2_rtx,
3978 TREE_TYPE (len), arg3_rtx,
3979 MIN (arg1_align, arg2_align));
3980 if (result)
3981 {
3982 /* Return the value in the proper mode for this function. */
3983 if (GET_MODE (result) == mode)
3984 return result;
3985
3986 if (target != 0)
3987 {
3988 convert_move (target, result, 0);
3989 return target;
3990 }
3991
3992 return convert_to_mode (mode, result, 0);
3993 }
3994
3995 result = target;
3996 if (! (result != 0
3997 && REG_P (result) && GET_MODE (result) == mode
3998 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3999 result = gen_reg_rtx (mode);
4000
4001 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
4002 TYPE_MODE (integer_type_node), 3,
4003 XEXP (arg1_rtx, 0), Pmode,
4004 XEXP (arg2_rtx, 0), Pmode,
4005 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4006 TYPE_UNSIGNED (sizetype)),
4007 TYPE_MODE (sizetype));
4008 return result;
4009 }
4010
4011 /* Expand expression EXP, which is a call to the strcmp builtin.  Return
4012    NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
4013    try to get the result in TARGET, if convenient.  */
4014
4015 static rtx
4016 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4017 {
4018 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4019 return NULL_RTX;
4020
4021 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4022 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4023 if (cmpstr_icode != CODE_FOR_nothing || cmpstrn_icode != CODE_FOR_nothing)
4024 {
4025 rtx arg1_rtx, arg2_rtx;
4026 tree fndecl, fn;
4027 tree arg1 = CALL_EXPR_ARG (exp, 0);
4028 tree arg2 = CALL_EXPR_ARG (exp, 1);
4029 rtx result = NULL_RTX;
4030
4031 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4032 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4033
4034       /* If we don't know the alignment of either argument, call the function.  */
4035 if (arg1_align == 0 || arg2_align == 0)
4036 return NULL_RTX;
4037
4038       /* Stabilize the arguments in case gen_cmpstr(n)si fails.  */
4039 arg1 = builtin_save_expr (arg1);
4040 arg2 = builtin_save_expr (arg2);
4041
4042 arg1_rtx = get_memory_rtx (arg1, NULL);
4043 arg2_rtx = get_memory_rtx (arg2, NULL);
4044
4045 /* Try to call cmpstrsi. */
4046 if (cmpstr_icode != CODE_FOR_nothing)
4047 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4048 MIN (arg1_align, arg2_align));
4049
4050 /* Try to determine at least one length and call cmpstrnsi. */
4051 if (!result && cmpstrn_icode != CODE_FOR_nothing)
4052 {
4053 tree len;
4054 rtx arg3_rtx;
4055
4056 tree len1 = c_strlen (arg1, 1);
4057 tree len2 = c_strlen (arg2, 1);
4058
4059 if (len1)
4060 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4061 if (len2)
4062 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4063
4064 /* If we don't have a constant length for the first, use the length
4065 of the second, if we know it. We don't require a constant for
4066 this case; some cost analysis could be done if both are available
4067 but neither is constant. For now, assume they're equally cheap,
4068 unless one has side effects. If both strings have constant lengths,
4069 use the smaller. */
4070
4071 if (!len1)
4072 len = len2;
4073 else if (!len2)
4074 len = len1;
4075 else if (TREE_SIDE_EFFECTS (len1))
4076 len = len2;
4077 else if (TREE_SIDE_EFFECTS (len2))
4078 len = len1;
4079 else if (TREE_CODE (len1) != INTEGER_CST)
4080 len = len2;
4081 else if (TREE_CODE (len2) != INTEGER_CST)
4082 len = len1;
4083 else if (tree_int_cst_lt (len1, len2))
4084 len = len1;
4085 else
4086 len = len2;
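	  /* E.g. if ARG2 is the string literal "abc" but ARG1's length is
	     unknown, LEN1 is NULL and LEN2 is 4 (strlen + 1), so LEN
	     becomes 4 and the comparison below is bounded to 4 bytes.  */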
4087
4088 /* If both arguments have side effects, we cannot optimize. */
4089 if (len && !TREE_SIDE_EFFECTS (len))
4090 {
4091 arg3_rtx = expand_normal (len);
4092 result = expand_cmpstrn_or_cmpmem
4093 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4094 arg3_rtx, MIN (arg1_align, arg2_align));
4095 }
4096 }
4097
4098 if (result)
4099 {
4100 /* Return the value in the proper mode for this function. */
4101 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4102 if (GET_MODE (result) == mode)
4103 return result;
4104 if (target == 0)
4105 return convert_to_mode (mode, result, 0);
4106 convert_move (target, result, 0);
4107 return target;
4108 }
4109
4110 /* Expand the library call ourselves using a stabilized argument
4111 list to avoid re-evaluating the function's arguments twice. */
4112 fndecl = get_callee_fndecl (exp);
4113 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4114 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4115 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4116 return expand_call (fn, target, target == const0_rtx);
4117 }
4118 return NULL_RTX;
4119 }
4120
4121 /* Expand expression EXP, which is a call to the strncmp builtin.  Return
4122    NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
4123    try to get the result in TARGET, if convenient.  */
4124
4125 static rtx
4126 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4127 ATTRIBUTE_UNUSED machine_mode mode)
4128 {
4129 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4130
4131 if (!validate_arglist (exp,
4132 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4133 return NULL_RTX;
4134
4135 /* If c_strlen can determine an expression for one of the string
4136 lengths, and it doesn't have side effects, then emit cmpstrnsi
4137 using length MIN(strlen(string)+1, arg3). */
4138 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4139 if (cmpstrn_icode != CODE_FOR_nothing)
4140 {
4141 tree len, len1, len2;
4142 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4143 rtx result;
4144 tree fndecl, fn;
4145 tree arg1 = CALL_EXPR_ARG (exp, 0);
4146 tree arg2 = CALL_EXPR_ARG (exp, 1);
4147 tree arg3 = CALL_EXPR_ARG (exp, 2);
4148
4149 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4150 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4151
4152 len1 = c_strlen (arg1, 1);
4153 len2 = c_strlen (arg2, 1);
4154
4155 if (len1)
4156 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4157 if (len2)
4158 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4159
4160 /* If we don't have a constant length for the first, use the length
4161 of the second, if we know it. We don't require a constant for
4162 this case; some cost analysis could be done if both are available
4163 but neither is constant. For now, assume they're equally cheap,
4164 unless one has side effects. If both strings have constant lengths,
4165 use the smaller. */
4166
4167 if (!len1)
4168 len = len2;
4169 else if (!len2)
4170 len = len1;
4171 else if (TREE_SIDE_EFFECTS (len1))
4172 len = len2;
4173 else if (TREE_SIDE_EFFECTS (len2))
4174 len = len1;
4175 else if (TREE_CODE (len1) != INTEGER_CST)
4176 len = len2;
4177 else if (TREE_CODE (len2) != INTEGER_CST)
4178 len = len1;
4179 else if (tree_int_cst_lt (len1, len2))
4180 len = len1;
4181 else
4182 len = len2;
4183
4184 /* If both arguments have side effects, we cannot optimize. */
4185 if (!len || TREE_SIDE_EFFECTS (len))
4186 return NULL_RTX;
4187
4188 /* The actual new length parameter is MIN(len,arg3). */
4189 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4190 fold_convert_loc (loc, TREE_TYPE (len), arg3));
4191
4192       /* If we don't know the alignment of either argument, call the function.  */
4193 if (arg1_align == 0 || arg2_align == 0)
4194 return NULL_RTX;
4195
4196 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4197 arg1 = builtin_save_expr (arg1);
4198 arg2 = builtin_save_expr (arg2);
4199 len = builtin_save_expr (len);
4200
4201 arg1_rtx = get_memory_rtx (arg1, len);
4202 arg2_rtx = get_memory_rtx (arg2, len);
4203 arg3_rtx = expand_normal (len);
4204 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
4205 arg2_rtx, TREE_TYPE (len), arg3_rtx,
4206 MIN (arg1_align, arg2_align));
4207 if (result)
4208 {
4209 /* Return the value in the proper mode for this function. */
4210 mode = TYPE_MODE (TREE_TYPE (exp));
4211 if (GET_MODE (result) == mode)
4212 return result;
4213 if (target == 0)
4214 return convert_to_mode (mode, result, 0);
4215 convert_move (target, result, 0);
4216 return target;
4217 }
4218
4219 /* Expand the library call ourselves using a stabilized argument
4220 list to avoid re-evaluating the function's arguments twice. */
4221 fndecl = get_callee_fndecl (exp);
4222 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4223 arg1, arg2, len);
4224 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4225 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4226 return expand_call (fn, target, target == const0_rtx);
4227 }
4228 return NULL_RTX;
4229 }
4230
4231 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4232 if that's convenient. */
4233
4234 rtx
4235 expand_builtin_saveregs (void)
4236 {
4237 rtx val;
4238 rtx_insn *seq;
4239
4240 /* Don't do __builtin_saveregs more than once in a function.
4241 Save the result of the first call and reuse it. */
4242 if (saveregs_value != 0)
4243 return saveregs_value;
4244
4245 /* When this function is called, it means that registers must be
4246 saved on entry to this function. So we migrate the call to the
4247 first insn of this function. */
4248
4249 start_sequence ();
4250
4251 /* Do whatever the machine needs done in this case. */
4252 val = targetm.calls.expand_builtin_saveregs ();
4253
4254 seq = get_insns ();
4255 end_sequence ();
4256
4257 saveregs_value = val;
4258
4259 /* Put the insns after the NOTE that starts the function. If this
4260 is inside a start_sequence, make the outer-level insn chain current, so
4261 the code is placed at the start of the function. */
4262 push_topmost_sequence ();
4263 emit_insn_after (seq, entry_of_function ());
4264 pop_topmost_sequence ();
4265
4266 return val;
4267 }
4268
4269 /* Expand a call to __builtin_next_arg. */
4270
4271 static rtx
4272 expand_builtin_next_arg (void)
4273 {
4274 /* Checking arguments is already done in fold_builtin_next_arg
4275 that must be called before this function. */
4276 return expand_binop (ptr_mode, add_optab,
4277 crtl->args.internal_arg_pointer,
4278 crtl->args.arg_offset_rtx,
4279 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4280 }
4281
4282 /* Make it easier for the backends by protecting the valist argument
4283 from multiple evaluations. */
4284
4285 static tree
4286 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4287 {
4288 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4289
4290 /* The current way of determining the type of valist is completely
4291 bogus. We should have the information on the va builtin instead. */
4292 if (!vatype)
4293 vatype = targetm.fn_abi_va_list (cfun->decl);
4294
4295 if (TREE_CODE (vatype) == ARRAY_TYPE)
4296 {
4297 if (TREE_SIDE_EFFECTS (valist))
4298 valist = save_expr (valist);
4299
4300 /* For this case, the backends will be expecting a pointer to
4301 vatype, but it's possible we've actually been given an array
4302 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4303 So fix it. */
4304 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4305 {
4306 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4307 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4308 }
4309 }
4310 else
4311 {
4312 tree pt = build_pointer_type (vatype);
4313
4314 if (! needs_lvalue)
4315 {
4316 if (! TREE_SIDE_EFFECTS (valist))
4317 return valist;
4318
4319 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4320 TREE_SIDE_EFFECTS (valist) = 1;
4321 }
4322
4323 if (TREE_SIDE_EFFECTS (valist))
4324 valist = save_expr (valist);
4325 valist = fold_build2_loc (loc, MEM_REF,
4326 vatype, valist, build_int_cst (pt, 0));
4327 }
4328
4329 return valist;
4330 }
4331
4332 /* The "standard" definition of va_list is void*. */
4333
4334 tree
4335 std_build_builtin_va_list (void)
4336 {
4337 return ptr_type_node;
4338 }
4339
4340 /* The "standard" abi va_list is va_list_type_node. */
4341
4342 tree
4343 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4344 {
4345 return va_list_type_node;
4346 }
4347
4348 /* The "standard" type of va_list is va_list_type_node. */
4349
4350 tree
4351 std_canonical_va_list_type (tree type)
4352 {
4353 tree wtype, htype;
4354
4355 if (INDIRECT_REF_P (type))
4356 type = TREE_TYPE (type);
4357 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
4358 type = TREE_TYPE (type);
4359 wtype = va_list_type_node;
4360 htype = type;
4361   /* Handle structure va_list types.  */
4362 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4363 htype = TREE_TYPE (htype);
4364 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4365 {
4366 /* If va_list is an array type, the argument may have decayed
4367 to a pointer type, e.g. by being passed to another function.
4368 In that case, unwrap both types so that we can compare the
4369 underlying records. */
4370 if (TREE_CODE (htype) == ARRAY_TYPE
4371 || POINTER_TYPE_P (htype))
4372 {
4373 wtype = TREE_TYPE (wtype);
4374 htype = TREE_TYPE (htype);
4375 }
4376 }
4377 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4378 return va_list_type_node;
4379
4380 return NULL_TREE;
4381 }
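/* Example (x86-64 is one such target): when va_list is an array of
   one record type, a va_list function argument decays to a pointer to
   that record, so both the array form and the decayed pointer form
   end up matching va_list_type_node after the unwrapping above.  */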
4382
4383 /* The "standard" implementation of va_start: just assign `nextarg' to
4384 the variable. */
4385
4386 void
4387 std_expand_builtin_va_start (tree valist, rtx nextarg)
4388 {
4389 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4390 convert_move (va_r, nextarg, 0);
4391
4392 /* We do not have any valid bounds for the pointer, so
4393 just store zero bounds for it. */
4394 if (chkp_function_instrumented_p (current_function_decl))
4395 chkp_expand_bounds_reset_for_mem (valist,
4396 make_tree (TREE_TYPE (valist),
4397 nextarg));
4398 }
4399
4400 /* Expand EXP, a call to __builtin_va_start. */
4401
4402 static rtx
4403 expand_builtin_va_start (tree exp)
4404 {
4405 rtx nextarg;
4406 tree valist;
4407 location_t loc = EXPR_LOCATION (exp);
4408
4409 if (call_expr_nargs (exp) < 2)
4410 {
4411 error_at (loc, "too few arguments to function %<va_start%>");
4412 return const0_rtx;
4413 }
4414
4415 if (fold_builtin_next_arg (exp, true))
4416 return const0_rtx;
4417
4418 nextarg = expand_builtin_next_arg ();
4419 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4420
4421 if (targetm.expand_builtin_va_start)
4422 targetm.expand_builtin_va_start (valist, nextarg);
4423 else
4424 std_expand_builtin_va_start (valist, nextarg);
4425
4426 return const0_rtx;
4427 }
4428
4429 /* Expand EXP, a call to __builtin_va_end. */
4430
4431 static rtx
4432 expand_builtin_va_end (tree exp)
4433 {
4434 tree valist = CALL_EXPR_ARG (exp, 0);
4435
4436 /* Evaluate for side effects, if needed. I hate macros that don't
4437 do that. */
4438 if (TREE_SIDE_EFFECTS (valist))
4439 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4440
4441 return const0_rtx;
4442 }
4443
4444 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4445 builtin rather than just as an assignment in stdarg.h because of the
4446 nastiness of array-type va_list types. */
4447
4448 static rtx
4449 expand_builtin_va_copy (tree exp)
4450 {
4451 tree dst, src, t;
4452 location_t loc = EXPR_LOCATION (exp);
4453
4454 dst = CALL_EXPR_ARG (exp, 0);
4455 src = CALL_EXPR_ARG (exp, 1);
4456
4457 dst = stabilize_va_list_loc (loc, dst, 1);
4458 src = stabilize_va_list_loc (loc, src, 0);
4459
4460 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4461
4462 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4463 {
4464 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4465 TREE_SIDE_EFFECTS (t) = 1;
4466 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4467 }
4468 else
4469 {
4470 rtx dstb, srcb, size;
4471
4472 /* Evaluate to pointers. */
4473 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4474 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4475 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4476 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4477
4478 dstb = convert_memory_address (Pmode, dstb);
4479 srcb = convert_memory_address (Pmode, srcb);
4480
4481 /* "Dereference" to BLKmode memories. */
4482 dstb = gen_rtx_MEM (BLKmode, dstb);
4483 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4484 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4485 srcb = gen_rtx_MEM (BLKmode, srcb);
4486 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4487 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4488
4489 /* Copy. */
4490 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4491 }
4492
4493 return const0_rtx;
4494 }
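/* The array-type branch above is the reason va_copy cannot be a plain
   assignment in stdarg.h: when va_list is an array type, "dst = src"
   is not valid C (arrays are not assignable), so the underlying
   storage is copied as a block instead.  */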
4495
4496 /* Expand a call to one of the builtin functions __builtin_frame_address or
4497 __builtin_return_address. */
4498
4499 static rtx
4500 expand_builtin_frame_address (tree fndecl, tree exp)
4501 {
4502 /* The argument must be a nonnegative integer constant.
4503 It counts the number of frames to scan up the stack.
4504 The value is either the frame pointer value or the return
4505 address saved in that frame. */
4506 if (call_expr_nargs (exp) == 0)
4507 /* Warning about missing arg was already issued. */
4508 return const0_rtx;
4509 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4510 {
4511 error ("invalid argument to %qD", fndecl);
4512 return const0_rtx;
4513 }
4514 else
4515 {
4516 /* Number of frames to scan up the stack. */
4517 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
4518
4519 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
4520
4521 /* Some ports cannot access arbitrary stack frames. */
4522 if (tem == NULL)
4523 {
4524 warning (0, "unsupported argument to %qD", fndecl);
4525 return const0_rtx;
4526 }
4527
4528 if (count)
4529 {
4530 /* Warn since no effort is made to ensure that any frame
4531 beyond the current one exists or can be safely reached. */
4532 warning (OPT_Wframe_address, "calling %qD with "
4533 "a nonzero argument is unsafe", fndecl);
4534 }
4535
4536 /* For __builtin_frame_address, return what we've got. */
4537 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4538 return tem;
4539
4540 if (!REG_P (tem)
4541 && ! CONSTANT_P (tem))
4542 tem = copy_addr_to_reg (tem);
4543 return tem;
4544 }
4545 }
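/* Typical source-level uses, for reference:

     void *fp = __builtin_frame_address (0);    current frame
     void *ra = __builtin_return_address (0);   current return address

   Any nonzero COUNT triggers the -Wframe-address warning above.  */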
4546
4547 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4548 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4549 is the same as for allocate_dynamic_stack_space. */
4550
4551 static rtx
4552 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4553 {
4554 rtx op0;
4555 rtx result;
4556 bool valid_arglist;
4557 unsigned int align;
4558 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4559 == BUILT_IN_ALLOCA_WITH_ALIGN);
4560
4561 valid_arglist
4562 = (alloca_with_align
4563 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4564 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4565
4566 if (!valid_arglist)
4567 return NULL_RTX;
4568
4569 /* Compute the argument. */
4570 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4571
4572 /* Compute the alignment. */
4573 align = (alloca_with_align
4574 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4575 : BIGGEST_ALIGNMENT);
4576
4577 /* Allocate the desired space. */
4578 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4579 result = convert_memory_address (ptr_mode, result);
4580
4581 return result;
4582 }
4583
4584 /* Expand a call to bswap builtin in EXP.
4585 Return NULL_RTX if a normal call should be emitted rather than expanding the
4586 function in-line. If convenient, the result should be placed in TARGET.
4587 SUBTARGET may be used as the target for computing one of EXP's operands. */
4588
4589 static rtx
4590 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
4591 rtx subtarget)
4592 {
4593 tree arg;
4594 rtx op0;
4595
4596 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4597 return NULL_RTX;
4598
4599 arg = CALL_EXPR_ARG (exp, 0);
4600 op0 = expand_expr (arg,
4601 subtarget && GET_MODE (subtarget) == target_mode
4602 ? subtarget : NULL_RTX,
4603 target_mode, EXPAND_NORMAL);
4604 if (GET_MODE (op0) != target_mode)
4605 op0 = convert_to_mode (target_mode, op0, 1);
4606
4607 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4608
4609 gcc_assert (target);
4610
4611 return convert_to_mode (target_mode, target, 1);
4612 }
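/* For reference, the semantics being expanded:
     __builtin_bswap32 (0x12345678) == 0x78563412
     __builtin_bswap16 (0x1234) == 0x3412
   i.e. the operand's bytes are reversed end to end.  */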
4613
4614 /* Expand a call to a unary builtin in EXP.
4615 Return NULL_RTX if a normal call should be emitted rather than expanding the
4616 function in-line. If convenient, the result should be placed in TARGET.
4617 SUBTARGET may be used as the target for computing one of EXP's operands. */
4618
4619 static rtx
4620 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
4621 rtx subtarget, optab op_optab)
4622 {
4623 rtx op0;
4624
4625 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4626 return NULL_RTX;
4627
4628 /* Compute the argument. */
4629 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4630 (subtarget
4631 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4632 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4633 VOIDmode, EXPAND_NORMAL);
4634 /* Compute op, into TARGET if possible.
4635 Set TARGET to wherever the result comes back. */
4636 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4637 op_optab, op0, target, op_optab != clrsb_optab);
4638 gcc_assert (target);
4639
4640 return convert_to_mode (target_mode, target, 0);
4641 }
4642
4643 /* Expand a call to __builtin_expect. We just return our argument
4644    as the builtin_expect semantics should already have been applied by
4645    the tree branch prediction pass.  */
4646
4647 static rtx
4648 expand_builtin_expect (tree exp, rtx target)
4649 {
4650 tree arg;
4651
4652 if (call_expr_nargs (exp) < 2)
4653 return const0_rtx;
4654 arg = CALL_EXPR_ARG (exp, 0);
4655
4656 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4657 /* When guessing was done, the hints should be already stripped away. */
4658 gcc_assert (!flag_guess_branch_prob
4659 || optimize == 0 || seen_error ());
4660 return target;
4661 }
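/* Typical source-level use (fast_path is a placeholder):

     if (__builtin_expect (ptr != NULL, 1))
       fast_path (ptr);

   By expansion time the hint has already been folded into the CFG's
   edge probabilities, so only the first argument is forwarded.  */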
4662
4663 /* Expand a call to __builtin_assume_aligned. We just return our first
4664    argument, as the builtin_assume_aligned semantics should already have
4665    been applied by CCP.  */
4666
4667 static rtx
4668 expand_builtin_assume_aligned (tree exp, rtx target)
4669 {
4670 if (call_expr_nargs (exp) < 2)
4671 return const0_rtx;
4672 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4673 EXPAND_NORMAL);
4674 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4675 && (call_expr_nargs (exp) < 3
4676 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4677 return target;
4678 }
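/* Source-level example (illustrative):

     double *dp = (double *) __builtin_assume_aligned (p, 32);

   The alignment fact was already attached to the SSA pointer info by
   CCP; only the pointer value itself remains to be returned.  */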
4679
4680 void
4681 expand_builtin_trap (void)
4682 {
4683 if (targetm.have_trap ())
4684 {
4685 rtx_insn *insn = emit_insn (targetm.gen_trap ());
4686 /* For trap insns when not accumulating outgoing args force
4687 REG_ARGS_SIZE note to prevent crossjumping of calls with
4688 different args sizes. */
4689 if (!ACCUMULATE_OUTGOING_ARGS)
4690 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4691 }
4692 else
4693 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4694 emit_barrier ();
4695 }
4696
4697 /* Expand a call to __builtin_unreachable. We do nothing except emit
4698 a barrier saying that control flow will not pass here.
4699
4700 It is the responsibility of the program being compiled to ensure
4701    that control flow never reaches __builtin_unreachable.  */
4702 static void
4703 expand_builtin_unreachable (void)
4704 {
4705 emit_barrier ();
4706 }
4707
4708 /* Expand EXP, a call to fabs, fabsf or fabsl.
4709 Return NULL_RTX if a normal call should be emitted rather than expanding
4710 the function inline. If convenient, the result should be placed
4711 in TARGET. SUBTARGET may be used as the target for computing
4712 the operand. */
4713
4714 static rtx
4715 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4716 {
4717 machine_mode mode;
4718 tree arg;
4719 rtx op0;
4720
4721 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4722 return NULL_RTX;
4723
4724 arg = CALL_EXPR_ARG (exp, 0);
4725 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4726 mode = TYPE_MODE (TREE_TYPE (arg));
4727 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4728 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4729 }
4730
4731 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4732    Return NULL_RTX if a normal call should be emitted rather than expanding the
4733 function inline. If convenient, the result should be placed in TARGET.
4734 SUBTARGET may be used as the target for computing the operand. */
4735
4736 static rtx
4737 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4738 {
4739 rtx op0, op1;
4740 tree arg;
4741
4742 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4743 return NULL_RTX;
4744
4745 arg = CALL_EXPR_ARG (exp, 0);
4746 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4747
4748 arg = CALL_EXPR_ARG (exp, 1);
4749 op1 = expand_normal (arg);
4750
4751 return expand_copysign (op0, op1, target);
4752 }
4753
4754 /* Expand a call to __builtin___clear_cache. */
4755
4756 static rtx
4757 expand_builtin___clear_cache (tree exp)
4758 {
4759 if (!targetm.code_for_clear_cache)
4760 {
4761 #ifdef CLEAR_INSN_CACHE
4762 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4763 does something. Just do the default expansion to a call to
4764 __clear_cache(). */
4765 return NULL_RTX;
4766 #else
4767 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4768 does nothing. There is no need to call it. Do nothing. */
4769 return const0_rtx;
4770 #endif /* CLEAR_INSN_CACHE */
4771 }
4772
4773 /* We have a "clear_cache" insn, and it will handle everything. */
4774 tree begin, end;
4775 rtx begin_rtx, end_rtx;
4776
4777 /* We must not expand to a library call. If we did, any
4778 fallback library function in libgcc that might contain a call to
4779 __builtin___clear_cache() would recurse infinitely. */
4780 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4781 {
4782 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4783 return const0_rtx;
4784 }
4785
4786 if (targetm.have_clear_cache ())
4787 {
4788 struct expand_operand ops[2];
4789
4790 begin = CALL_EXPR_ARG (exp, 0);
4791 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4792
4793 end = CALL_EXPR_ARG (exp, 1);
4794 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4795
4796 create_address_operand (&ops[0], begin_rtx);
4797 create_address_operand (&ops[1], end_rtx);
4798 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
4799 return const0_rtx;
4800 }
4801 return const0_rtx;
4802 }
4803
4804 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4805
4806 static rtx
4807 round_trampoline_addr (rtx tramp)
4808 {
4809 rtx temp, addend, mask;
4810
4811 /* If we don't need too much alignment, we'll have been guaranteed
4812 proper alignment by get_trampoline_type. */
4813 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4814 return tramp;
4815
4816 /* Round address up to desired boundary. */
4817 temp = gen_reg_rtx (Pmode);
4818 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4819 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4820
4821 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4822 temp, 0, OPTAB_LIB_WIDEN);
4823 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4824 temp, 0, OPTAB_LIB_WIDEN);
4825
4826 return tramp;
4827 }
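/* Worked example: with an 8-byte trampoline alignment, ADDEND is 7
   and MASK is -8, so an address of 0x1005 rounds to
   (0x1005 + 7) & -8 == 0x1008.  */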
4828
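/* Expand a call to the __builtin_init_trampoline builtin.  EXP is the
   CALL_EXPR; ONSTACK is true for the classic on-stack nested-function
   trampoline and false for a heap-allocated one.  */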
4829 static rtx
4830 expand_builtin_init_trampoline (tree exp, bool onstack)
4831 {
4832 tree t_tramp, t_func, t_chain;
4833 rtx m_tramp, r_tramp, r_chain, tmp;
4834
4835 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4836 POINTER_TYPE, VOID_TYPE))
4837 return NULL_RTX;
4838
4839 t_tramp = CALL_EXPR_ARG (exp, 0);
4840 t_func = CALL_EXPR_ARG (exp, 1);
4841 t_chain = CALL_EXPR_ARG (exp, 2);
4842
4843 r_tramp = expand_normal (t_tramp);
4844 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4845 MEM_NOTRAP_P (m_tramp) = 1;
4846
4847 /* If ONSTACK, the TRAMP argument should be the address of a field
4848 within the local function's FRAME decl. Either way, let's see if
4849 we can fill in the MEM_ATTRs for this memory. */
4850 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4851 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
4852
4853 /* Creator of a heap trampoline is responsible for making sure the
4854 address is aligned to at least STACK_BOUNDARY. Normally malloc
4855 will ensure this anyhow. */
4856 tmp = round_trampoline_addr (r_tramp);
4857 if (tmp != r_tramp)
4858 {
4859 m_tramp = change_address (m_tramp, BLKmode, tmp);
4860 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4861 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4862 }
4863
4864 /* The FUNC argument should be the address of the nested function.
4865 Extract the actual function decl to pass to the hook. */
4866 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4867 t_func = TREE_OPERAND (t_func, 0);
4868 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4869
4870 r_chain = expand_normal (t_chain);
4871
4872 /* Generate insns to initialize the trampoline. */
4873 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4874
4875 if (onstack)
4876 {
4877 trampolines_created = 1;
4878
4879 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4880 "trampoline generated for nested function %qD", t_func);
4881 }
4882
4883 return const0_rtx;
4884 }
4885
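/* Expand a call to the __builtin_adjust_trampoline builtin.  Round
   the trampoline address to the required alignment and let the target
   make any final adjustment.  */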
4886 static rtx
4887 expand_builtin_adjust_trampoline (tree exp)
4888 {
4889 rtx tramp;
4890
4891 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4892 return NULL_RTX;
4893
4894 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4895 tramp = round_trampoline_addr (tramp);
4896 if (targetm.calls.trampoline_adjust_address)
4897 tramp = targetm.calls.trampoline_adjust_address (tramp);
4898
4899 return tramp;
4900 }
4901
4902 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4903 function. The function first checks whether the back end provides
4904 an insn to implement signbit for the respective mode. If not, it
4905 checks whether the floating point format of the value is such that
4906 the sign bit can be extracted. If that is not the case, error out.
4907 EXP is the expression that is a call to the builtin function; if
4908 convenient, the result should be placed in TARGET. */
4909 static rtx
4910 expand_builtin_signbit (tree exp, rtx target)
4911 {
4912 const struct real_format *fmt;
4913 machine_mode fmode, imode, rmode;
4914 tree arg;
4915 int word, bitpos;
4916 enum insn_code icode;
4917 rtx temp;
4918 location_t loc = EXPR_LOCATION (exp);
4919
4920 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4921 return NULL_RTX;
4922
4923 arg = CALL_EXPR_ARG (exp, 0);
4924 fmode = TYPE_MODE (TREE_TYPE (arg));
4925 rmode = TYPE_MODE (TREE_TYPE (exp));
4926 fmt = REAL_MODE_FORMAT (fmode);
4927
4928 arg = builtin_save_expr (arg);
4929
4930 /* Expand the argument yielding a RTX expression. */
4931 temp = expand_normal (arg);
4932
4933 /* Check if the back end provides an insn that handles signbit for the
4934 argument's mode. */
4935 icode = optab_handler (signbit_optab, fmode);
4936 if (icode != CODE_FOR_nothing)
4937 {
4938 rtx_insn *last = get_last_insn ();
4939 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4940 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4941 return target;
4942 delete_insns_since (last);
4943 }
4944
4945 /* For floating point formats without a sign bit, implement signbit
4946 as "ARG < 0.0". */
4947 bitpos = fmt->signbit_ro;
4948 if (bitpos < 0)
4949 {
4950 /* But we can't do this if the format supports signed zero. */
4951 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
4952
4953 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
4954 build_real (TREE_TYPE (arg), dconst0));
4955 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4956 }
4957
4958 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
4959 {
4960 imode = int_mode_for_mode (fmode);
4961 gcc_assert (imode != BLKmode);
4962 temp = gen_lowpart (imode, temp);
4963 }
4964 else
4965 {
4966 imode = word_mode;
4967 /* Handle targets with different FP word orders. */
4968 if (FLOAT_WORDS_BIG_ENDIAN)
4969 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
4970 else
4971 word = bitpos / BITS_PER_WORD;
4972 temp = operand_subword_force (temp, word, fmode);
4973 bitpos = bitpos % BITS_PER_WORD;
4974 }
4975
4976 /* Force the intermediate word_mode (or narrower) result into a
4977 register. This avoids attempting to create paradoxical SUBREGs
4978 of floating point modes below. */
4979 temp = force_reg (imode, temp);
4980
4981 /* If the bitpos is within the "result mode" lowpart, the operation
4982    can be implemented with a single bitwise AND.  Otherwise, we need
4983 a right shift and an AND. */
4984
4985 if (bitpos < GET_MODE_BITSIZE (rmode))
4986 {
4987 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
4988
4989 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
4990 temp = gen_lowpart (rmode, temp);
4991 temp = expand_binop (rmode, and_optab, temp,
4992 immed_wide_int_const (mask, rmode),
4993 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4994 }
4995 else
4996 {
4997 /* Perform a logical right shift to place the signbit in the least
4998 significant bit, then truncate the result to the desired mode
4999 and mask just this bit. */
5000 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5001 temp = gen_lowpart (rmode, temp);
5002 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5003 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5004 }
5005
5006 return temp;
5007 }
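/* Concrete instance of the lowpart case above, assuming IEEE single
   precision and a 32-bit result mode: fmt->signbit_ro is 31, so the
   whole operation reduces to

     bits & 0x80000000

   on the integer image of the float.  For IEEE double with a 32-bit
   result mode, BITPOS is 63 and the shift-then-AND path is taken.  */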
5008
5009 /* Expand fork or exec calls. TARGET is the desired target of the
5010    call.  EXP is the call.  FN is the identifier of the actual
5011    function.  IGNORE is nonzero if the
5012 value is to be ignored. */
5013
5014 static rtx
5015 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5016 {
5017 tree id, decl;
5018 tree call;
5019
5020 /* If we are not profiling, just call the function. */
5021 if (!profile_arc_flag)
5022 return NULL_RTX;
5023
5024 /* Otherwise call the wrapper. This should be equivalent for the rest of
5025      the compiler, so the code does not diverge, and the wrapper may run the
5026 code necessary for keeping the profiling sane. */
5027
5028 switch (DECL_FUNCTION_CODE (fn))
5029 {
5030 case BUILT_IN_FORK:
5031 id = get_identifier ("__gcov_fork");
5032 break;
5033
5034 case BUILT_IN_EXECL:
5035 id = get_identifier ("__gcov_execl");
5036 break;
5037
5038 case BUILT_IN_EXECV:
5039 id = get_identifier ("__gcov_execv");
5040 break;
5041
5042 case BUILT_IN_EXECLP:
5043 id = get_identifier ("__gcov_execlp");
5044 break;
5045
5046 case BUILT_IN_EXECLE:
5047 id = get_identifier ("__gcov_execle");
5048 break;
5049
5050 case BUILT_IN_EXECVP:
5051 id = get_identifier ("__gcov_execvp");
5052 break;
5053
5054 case BUILT_IN_EXECVE:
5055 id = get_identifier ("__gcov_execve");
5056 break;
5057
5058 default:
5059 gcc_unreachable ();
5060 }
5061
5062 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5063 FUNCTION_DECL, id, TREE_TYPE (fn));
5064 DECL_EXTERNAL (decl) = 1;
5065 TREE_PUBLIC (decl) = 1;
5066 DECL_ARTIFICIAL (decl) = 1;
5067 TREE_NOTHROW (decl) = 1;
5068 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5069 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5070 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5071 return expand_call (call, target, ignore);
5072 }
5073
5074
5075 \f
5076 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5077 the pointer in these functions is void*, the tree optimizers may remove
5078 casts. The mode computed in expand_builtin isn't reliable either, due
5079 to __sync_bool_compare_and_swap.
5080
5081 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5082 group of builtins. This gives us log2 of the mode size. */
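/* For instance, BUILT_IN_SYNC_FETCH_AND_ADD_4 sits two entries after
   BUILT_IN_SYNC_FETCH_AND_ADD_1, so FCODE_DIFF == 2 and the mode
   returned below has BITS_PER_UNIT << 2 == 32 bits (SImode on typical
   targets).  */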
5083
5084 static inline machine_mode
5085 get_builtin_sync_mode (int fcode_diff)
5086 {
5087 /* The size is not negotiable, so ask not to get BLKmode in return
5088 if the target indicates that a smaller size would be better. */
5089 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5090 }
5091
5092 /* Expand the memory expression LOC and return the appropriate memory operand
5093 for the builtin_sync operations. */
5094
5095 static rtx
5096 get_builtin_sync_mem (tree loc, machine_mode mode)
5097 {
5098 rtx addr, mem;
5099
5100 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5101 addr = convert_memory_address (Pmode, addr);
5102
5103 /* Note that we explicitly do not want any alias information for this
5104 memory, so that we kill all other live memories. Otherwise we don't
5105 satisfy the full barrier semantics of the intrinsic. */
5106 mem = validize_mem (gen_rtx_MEM (mode, addr));
5107
5108 /* The alignment must be at least that required by the mode. */
5109 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5110 get_pointer_alignment (loc)));
5111 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5112 MEM_VOLATILE_P (mem) = 1;
5113
5114 return mem;
5115 }
5116
5117 /* Make sure an argument is in the right mode.
5118 EXP is the tree argument.
5119 MODE is the mode it should be in. */
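/* For example (illustrative only): for __sync_fetch_and_add_1 (&c, 1)
   the constant 1 expands to a VOIDmode CONST_INT, so OLD_MODE below is
   taken from the tree type of the argument before the value is
   converted to MODE; on targets that promote subword values, a QImode
   argument may likewise come back in a wider register mode.  */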
5120
5121 static rtx
5122 expand_expr_force_mode (tree exp, machine_mode mode)
5123 {
5124 rtx val;
5125 machine_mode old_mode;
5126
5127 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5128 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5129 of CONST_INTs, where we know the old_mode only from the call argument. */
5130
5131 old_mode = GET_MODE (val);
5132 if (old_mode == VOIDmode)
5133 old_mode = TYPE_MODE (TREE_TYPE (exp));
5134 val = convert_modes (mode, old_mode, val, 1);
5135 return val;
5136 }
5137
5138
5139 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5140 EXP is the CALL_EXPR. CODE is the rtx code
5141 that corresponds to the arithmetic or logical operation from the name;
5142 an exception here is that NOT actually means NAND. TARGET is an optional
5143 place for us to store the results; AFTER is true if this is the
5144 fetch_and_xxx form. */
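/* As an illustrative sketch (not part of this file), the two forms
   differ only in which value is returned:

     int old = __sync_fetch_and_add (&x, n);   AFTER == false
     int new = __sync_add_and_fetch (&x, n);   AFTER == true

   and the GCC 4.4+ NAND semantics implemented here are

     __sync_fetch_and_nand (&x, n)  ==  { tmp = x; x = ~(x & n); tmp }  */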
5145
5146 static rtx
5147 expand_builtin_sync_operation (machine_mode mode, tree exp,
5148 enum rtx_code code, bool after,
5149 rtx target)
5150 {
5151 rtx val, mem;
5152 location_t loc = EXPR_LOCATION (exp);
5153
5154 if (code == NOT && warn_sync_nand)
5155 {
5156 tree fndecl = get_callee_fndecl (exp);
5157 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5158
5159 static bool warned_f_a_n, warned_n_a_f;
5160
5161 switch (fcode)
5162 {
5163 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5164 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5165 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5166 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5167 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5168 if (warned_f_a_n)
5169 break;
5170
5171 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5172 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5173 warned_f_a_n = true;
5174 break;
5175
5176 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5177 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5178 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5179 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5180 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5181 if (warned_n_a_f)
5182 break;
5183
5184 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5185 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5186 warned_n_a_f = true;
5187 break;
5188
5189 default:
5190 gcc_unreachable ();
5191 }
5192 }
5193
5194 /* Expand the operands. */
5195 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5196 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5197
5198 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
5199 after);
5200 }
5201
5202 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5203 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5204 true if this is the boolean form. TARGET is a place for us to store the
5205 results; this is NOT optional if IS_BOOL is true. */
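/* Illustrative usage (a sketch, not part of this file):

     bool ok  = __sync_bool_compare_and_swap (&x, oldv, newv);
     int  was = __sync_val_compare_and_swap (&x, oldv, newv);

   The first form corresponds to IS_BOOL == true and only reports
   whether the swap happened; the second returns the value of x
   before the operation.  */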
5206
5207 static rtx
5208 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5209 bool is_bool, rtx target)
5210 {
5211 rtx old_val, new_val, mem;
5212 rtx *pbool, *poval;
5213
5214 /* Expand the operands. */
5215 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5216 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5217 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5218
5219 pbool = poval = NULL;
5220 if (target != const0_rtx)
5221 {
5222 if (is_bool)
5223 pbool = &target;
5224 else
5225 poval = &target;
5226 }
5227 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5228 false, MEMMODEL_SYNC_SEQ_CST,
5229 MEMMODEL_SYNC_SEQ_CST))
5230 return NULL_RTX;
5231
5232 return target;
5233 }
5234
5235 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5236 general form is actually an atomic exchange, and some targets only
5237 support a reduced form with the second argument being a constant 1.
5238 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5239 the results. */
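/* A minimal sketch of the canonical use, assuming the target supports
   at least the reduced (constant 1) form:

     while (__sync_lock_test_and_set (&lock, 1))
       ;                              spin until we store 1 and saw 0
     ...critical section...
     __sync_lock_release (&lock);  */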
5240
5241 static rtx
5242 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5243 rtx target)
5244 {
5245 rtx val, mem;
5246
5247 /* Expand the operands. */
5248 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5249 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5250
5251 return expand_sync_lock_test_and_set (target, mem, val);
5252 }
5253
5254 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5255
5256 static void
5257 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5258 {
5259 rtx mem;
5260
5261 /* Expand the operands. */
5262 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5263
5264 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
5265 }
5266
5267 /* Given an integer representing an ``enum memmodel'', verify its
5268 correctness and return the memory model enum. */
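/* For example (illustrative only): in

     int v = __atomic_load_n (&x, order);

   where ORDER is not a compile-time constant, the code below cannot
   check it statically and conservatively returns MEMMODEL_SEQ_CST,
   which is correct for every weaker ordering the user may pass at
   run time.  */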
5269
5270 static enum memmodel
5271 get_memmodel (tree exp)
5272 {
5273 rtx op;
5274 unsigned HOST_WIDE_INT val;
5275
5276 /* If the parameter is not a constant, it's a run time value so we'll just
5277 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5278 if (TREE_CODE (exp) != INTEGER_CST)
5279 return MEMMODEL_SEQ_CST;
5280
5281 op = expand_normal (exp);
5282
5283 val = INTVAL (op);
5284 if (targetm.memmodel_check)
5285 val = targetm.memmodel_check (val);
5286 else if (val & ~MEMMODEL_MASK)
5287 {
5288 warning (OPT_Winvalid_memory_model,
5289 "Unknown architecture specifier in memory model to builtin.");
5290 return MEMMODEL_SEQ_CST;
5291 }
5292
5293 /* We should never see a user-specified SYNC memory model, so >= LAST works. */
5294 if (memmodel_base (val) >= MEMMODEL_LAST)
5295 {
5296 warning (OPT_Winvalid_memory_model,
5297 "invalid memory model argument to builtin");
5298 return MEMMODEL_SEQ_CST;
5299 }
5300
5301 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5302 be conservative and promote consume to acquire. */
5303 if (val == MEMMODEL_CONSUME)
5304 val = MEMMODEL_ACQUIRE;
5305
5306 return (enum memmodel) val;
5307 }
5308
5309 /* Expand the __atomic_exchange intrinsic:
5310 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5311 EXP is the CALL_EXPR.
5312 TARGET is an optional place for us to store the results. */
5313
5314 static rtx
5315 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5316 {
5317 rtx val, mem;
5318 enum memmodel model;
5319
5320 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5321
5322 if (!flag_inline_atomics)
5323 return NULL_RTX;
5324
5325 /* Expand the operands. */
5326 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5327 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5328
5329 return expand_atomic_exchange (target, mem, val, model);
5330 }
5331
5332 /* Expand the __atomic_compare_exchange intrinsic:
5333 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5334 TYPE desired, BOOL weak,
5335 enum memmodel success,
5336 enum memmodel failure)
5337 EXP is the CALL_EXPR.
5338 TARGET is an optional place for us to store the results. */
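/* The canonical user-level loop this expands (a sketch, not part of
   this file; the fourth argument is WEAK):

     int expected = *p;
     while (!__atomic_compare_exchange_n (p, &expected, expected + 1,
                                          true, __ATOMIC_SEQ_CST,
                                          __ATOMIC_RELAXED))
       ;

   On failure the builtin refreshes EXPECTED from *p; the conditional
   store back to EXPECT emitted below implements exactly that.  */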
5339
5340 static rtx
5341 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5342 rtx target)
5343 {
5344 rtx expect, desired, mem, oldval;
5345 rtx_code_label *label;
5346 enum memmodel success, failure;
5347 tree weak;
5348 bool is_weak;
5349
5350 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5351 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5352
5353 if (failure > success)
5354 {
5355 warning (OPT_Winvalid_memory_model,
5356 "failure memory model cannot be stronger than success memory "
5357 "model for %<__atomic_compare_exchange%>");
5358 success = MEMMODEL_SEQ_CST;
5359 }
5360
5361 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5362 {
5363 warning (OPT_Winvalid_memory_model,
5364 "invalid failure memory model for "
5365 "%<__atomic_compare_exchange%>");
5366 failure = MEMMODEL_SEQ_CST;
5367 success = MEMMODEL_SEQ_CST;
5368 }
5369
5370
5371 if (!flag_inline_atomics)
5372 return NULL_RTX;
5373
5374 /* Expand the operands. */
5375 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5376
5377 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5378 expect = convert_memory_address (Pmode, expect);
5379 expect = gen_rtx_MEM (mode, expect);
5380 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5381
5382 weak = CALL_EXPR_ARG (exp, 3);
5383 is_weak = false;
5384 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5385 is_weak = true;
5386
5387 if (target == const0_rtx)
5388 target = NULL;
5389
5390 /* Lest the rtl backend create a race condition with an improper store
5391 to memory, always create a new pseudo for OLDVAL. */
5392 oldval = NULL;
5393
5394 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5395 is_weak, success, failure))
5396 return NULL_RTX;
5397
5398 /* Conditionally store back to EXPECT, lest we create a race condition
5399 with an improper store to memory. */
5400 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5401 the normal case where EXPECT is totally private, i.e. a register. At
5402 which point the store can be unconditional. */
5403 label = gen_label_rtx ();
5404 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
5405 GET_MODE (target), 1, label);
5406 emit_move_insn (expect, oldval);
5407 emit_label (label);
5408
5409 return target;
5410 }
5411
5412 /* Expand the __atomic_load intrinsic:
5413 TYPE __atomic_load (TYPE *object, enum memmodel)
5414 EXP is the CALL_EXPR.
5415 TARGET is an optional place for us to store the results. */
5416
5417 static rtx
5418 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
5419 {
5420 rtx mem;
5421 enum memmodel model;
5422
5423 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5424 if (is_mm_release (model) || is_mm_acq_rel (model))
5425 {
5426 warning (OPT_Winvalid_memory_model,
5427 "invalid memory model for %<__atomic_load%>");
5428 model = MEMMODEL_SEQ_CST;
5429 }
5430
5431 if (!flag_inline_atomics)
5432 return NULL_RTX;
5433
5434 /* Expand the operand. */
5435 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5436
5437 return expand_atomic_load (target, mem, model);
5438 }
5439
5440
5441 /* Expand the __atomic_store intrinsic:
5442 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5443 EXP is the CALL_EXPR. */
5445
5446 static rtx
5447 expand_builtin_atomic_store (machine_mode mode, tree exp)
5448 {
5449 rtx mem, val;
5450 enum memmodel model;
5451
5452 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5453 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
5454 || is_mm_release (model)))
5455 {
5456 warning (OPT_Winvalid_memory_model,
5457 "invalid memory model for %<__atomic_store%>");
5458 model = MEMMODEL_SEQ_CST;
5459 }
5460
5461 if (!flag_inline_atomics)
5462 return NULL_RTX;
5463
5464 /* Expand the operands. */
5465 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5466 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5467
5468 return expand_atomic_store (mem, val, model, false);
5469 }
5470
5471 /* Expand the __atomic_fetch_XXX intrinsic:
5472 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5473 EXP is the CALL_EXPR.
5474 TARGET is an optional place for us to store the results.
5475 CODE is the operation: PLUS, MINUS, AND, XOR, IOR, or NOT (for NAND).
5476 FETCH_AFTER is true if returning the result of the operation,
5477 false if returning the value before the operation.
5478 IGNORE is true if the result is not used.
5479 EXT_CALL is the correct builtin for an external call if this cannot be
5480 resolved to an instruction sequence. */
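/* For example (an illustrative sketch): __atomic_add_fetch_4 on a
   target without inline support becomes a call to the external
   __atomic_fetch_add_4 routine (EXT_CALL), after which the code below
   applies the correction RET += VAL to turn the fetched (old) value
   into the post-operation value.  For NAND the correction is the
   two-step RET = ~(RET & VAL), matching the end of this function.  */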
5481
5482 static rtx
5483 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
5484 enum rtx_code code, bool fetch_after,
5485 bool ignore, enum built_in_function ext_call)
5486 {
5487 rtx val, mem, ret;
5488 enum memmodel model;
5489 tree fndecl;
5490 tree addr;
5491
5492 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5493
5494 /* Expand the operands. */
5495 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5496 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5497
5498 /* Only try generating instructions if inlining is turned on. */
5499 if (flag_inline_atomics)
5500 {
5501 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5502 if (ret)
5503 return ret;
5504 }
5505
5506 /* If there is no alternate routine for the library call, just return. */
5507 if (ext_call == BUILT_IN_NONE)
5508 return NULL_RTX;
5509
5510 /* Change the call to the specified function. */
5511 fndecl = get_callee_fndecl (exp);
5512 addr = CALL_EXPR_FN (exp);
5513 STRIP_NOPS (addr);
5514
5515 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5516 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
5517
5518 /* Expand the call here so we can emit trailing code. */
5519 ret = expand_call (exp, target, ignore);
5520
5521 /* Replace the original function just in case it matters. */
5522 TREE_OPERAND (addr, 0) = fndecl;
5523
5524 /* Then issue the arithmetic correction to return the right result. */
5525 if (!ignore)
5526 {
5527 if (code == NOT)
5528 {
5529 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5530 OPTAB_LIB_WIDEN);
5531 ret = expand_simple_unop (mode, NOT, ret, target, true);
5532 }
5533 else
5534 ret = expand_simple_binop (mode, code, ret, val, target, true,
5535 OPTAB_LIB_WIDEN);
5536 }
5537 return ret;
5538 }
5539
5540 /* Expand an atomic clear operation.
5541 void __atomic_clear (BOOL *obj, enum memmodel)
5542 EXP is the call expression. */
5543
5544 static rtx
5545 expand_builtin_atomic_clear (tree exp)
5546 {
5547 machine_mode mode;
5548 rtx mem, ret;
5549 enum memmodel model;
5550
5551 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5552 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5553 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5554
5555 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
5556 {
5557 warning (OPT_Winvalid_memory_model,
5558 "invalid memory model for %<__atomic_clear%>");
5559 model = MEMMODEL_SEQ_CST;
5560 }
5561
5562 /* Try issuing an __atomic_store, falling back to a __sync_lock_release
5563 pattern if the target prefers it. The only way this can fail is if the
5564 bool type is larger than a word size, which is unlikely, but handle it
5565 anyway for completeness. In that case assume a single threaded model,
5566 since there is no atomic support and no barriers are required. */
5567 ret = expand_atomic_store (mem, const0_rtx, model, true);
5568 if (!ret)
5569 emit_move_insn (mem, const0_rtx);
5570 return const0_rtx;
5571 }
5572
5573 /* Expand an atomic test_and_set operation.
5574 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
5575 EXP is the call expression. */
5576
5577 static rtx
5578 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5579 {
5580 rtx mem;
5581 enum memmodel model;
5582 machine_mode mode;
5583
5584 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5585 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5586 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5587
5588 return expand_atomic_test_and_set (target, mem, model);
5589 }
5590
5591
5592 /* Return true if the object of size ARG0, pointed to by optional argument
5593 ARG1, is always lock free here. If ARG1 is NULL, use typical alignment. */
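/* For example (illustrative only):

     __atomic_always_lock_free (sizeof (int), 0)

   folds to true on targets with a 32-bit compare-and-swap pattern,
   while passing a fake pointer such as (void *) 2 asks about an int
   known to be only 2-byte aligned, which may fold to false even when
   the naturally aligned case is lock free.  */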
5594
5595 static tree
5596 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5597 {
5598 int size;
5599 machine_mode mode;
5600 unsigned int mode_align, type_align;
5601
5602 if (TREE_CODE (arg0) != INTEGER_CST)
5603 return NULL_TREE;
5604
5605 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5606 mode = mode_for_size (size, MODE_INT, 0);
5607 mode_align = GET_MODE_ALIGNMENT (mode);
5608
5609 if (TREE_CODE (arg1) == INTEGER_CST)
5610 {
5611 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
5612
5613 /* Either this argument is null, or it's a fake pointer encoding
5614 the alignment of the object. */
5615 val = val & -val;
5616 val *= BITS_PER_UNIT;
5617
5618 if (val == 0 || mode_align < val)
5619 type_align = mode_align;
5620 else
5621 type_align = val;
5622 }
5623 else
5624 {
5625 tree ttype = TREE_TYPE (arg1);
5626
5627 /* This function is usually invoked and folded immediately by the front
5628 end before anything else has a chance to look at it. The pointer
5629 parameter at this point is usually cast to a void *, so check for that
5630 and look past the cast. */
5631 if (CONVERT_EXPR_P (arg1) && POINTER_TYPE_P (ttype)
5632 && VOID_TYPE_P (TREE_TYPE (ttype)))
5633 arg1 = TREE_OPERAND (arg1, 0);
5634
5635 ttype = TREE_TYPE (arg1);
5636 gcc_assert (POINTER_TYPE_P (ttype));
5637
5638 /* Get the underlying type of the object. */
5639 ttype = TREE_TYPE (ttype);
5640 type_align = TYPE_ALIGN (ttype);
5641 }
5642
5643 /* If the object has smaller alignment, the lock free routines cannot
5644 be used. */
5645 if (type_align < mode_align)
5646 return boolean_false_node;
5647
5648 /* Check if a compare_and_swap pattern exists for the mode which represents
5649 the required size. The pattern is not allowed to fail, so the existence
5650 of the pattern indicates support is present. */
5651 if (can_compare_and_swap_p (mode, true))
5652 return boolean_true_node;
5653 else
5654 return boolean_false_node;
5655 }
5656
5657 /* Return true if the parameters to call EXP represent an object which will
5658 always generate lock free instructions. The first argument represents the
5659 size of the object, and the second parameter is a pointer to the object
5660 itself. If NULL is passed for the object, then the result is based on
5661 typical alignment for an object of the specified size. Otherwise return
5662 false. */
5663
5664 static rtx
5665 expand_builtin_atomic_always_lock_free (tree exp)
5666 {
5667 tree size;
5668 tree arg0 = CALL_EXPR_ARG (exp, 0);
5669 tree arg1 = CALL_EXPR_ARG (exp, 1);
5670
5671 if (TREE_CODE (arg0) != INTEGER_CST)
5672 {
5673 error ("non-constant argument 1 to __atomic_always_lock_free");
5674 return const0_rtx;
5675 }
5676
5677 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5678 if (size == boolean_true_node)
5679 return const1_rtx;
5680 return const0_rtx;
5681 }
5682
5683 /* Return boolean_true_node if it can be determined that object ARG1 of size
5684 ARG0 is lock free on this architecture; otherwise return NULL_TREE. */
5685
5686 static tree
5687 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5688 {
5689 if (!flag_inline_atomics)
5690 return NULL_TREE;
5691
5692 /* If it is always lock free, the answer is known; otherwise don't fold. */
5693 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5694 return boolean_true_node;
5695
5696 return NULL_TREE;
5697 }
5698
5699 /* Return one if the parameters to call EXP represent an object which will
5700 always generate lock free instructions. The first argument represents the
5701 size of the object, and the second parameter is a pointer to the object
5702 itself. If NULL is passed for the object, then the result is based on
5703 typical alignment for an object of the specified size. Otherwise return
5704 NULL_RTX. */
5705
5706 static rtx
5707 expand_builtin_atomic_is_lock_free (tree exp)
5708 {
5709 tree size;
5710 tree arg0 = CALL_EXPR_ARG (exp, 0);
5711 tree arg1 = CALL_EXPR_ARG (exp, 1);
5712
5713 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5714 {
5715 error ("non-integer argument 1 to __atomic_is_lock_free");
5716 return NULL_RTX;
5717 }
5718
5719 if (!flag_inline_atomics)
5720 return NULL_RTX;
5721
5722 /* If the value is known at compile time, return the RTX for it. */
5723 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5724 if (size == boolean_true_node)
5725 return const1_rtx;
5726
5727 return NULL_RTX;
5728 }
5729
5730 /* Expand the __atomic_thread_fence intrinsic:
5731 void __atomic_thread_fence (enum memmodel)
5732 EXP is the CALL_EXPR. */
5733
5734 static void
5735 expand_builtin_atomic_thread_fence (tree exp)
5736 {
5737 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5738 expand_mem_thread_fence (model);
5739 }
5740
5741 /* Expand the __atomic_signal_fence intrinsic:
5742 void __atomic_signal_fence (enum memmodel)
5743 EXP is the CALL_EXPR. */
5744
5745 static void
5746 expand_builtin_atomic_signal_fence (tree exp)
5747 {
5748 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5749 expand_mem_signal_fence (model);
5750 }
5751
5752 /* Expand the __sync_synchronize intrinsic. */
5753
5754 static void
5755 expand_builtin_sync_synchronize (void)
5756 {
5757 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
5758 }
5759
5760 static rtx
5761 expand_builtin_thread_pointer (tree exp, rtx target)
5762 {
5763 enum insn_code icode;
5764 if (!validate_arglist (exp, VOID_TYPE))
5765 return const0_rtx;
5766 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5767 if (icode != CODE_FOR_nothing)
5768 {
5769 struct expand_operand op;
5770 /* If the target is not suitable then create a new one. */
5771 if (target == NULL_RTX
5772 || !REG_P (target)
5773 || GET_MODE (target) != Pmode)
5774 target = gen_reg_rtx (Pmode);
5775 create_output_operand (&op, target, Pmode);
5776 expand_insn (icode, 1, &op);
5777 return target;
5778 }
5779 error ("__builtin_thread_pointer is not supported on this target");
5780 return const0_rtx;
5781 }
5782
5783 static void
5784 expand_builtin_set_thread_pointer (tree exp)
5785 {
5786 enum insn_code icode;
5787 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5788 return;
5789 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5790 if (icode != CODE_FOR_nothing)
5791 {
5792 struct expand_operand op;
5793 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5794 Pmode, EXPAND_NORMAL);
5795 create_input_operand (&op, val, Pmode);
5796 expand_insn (icode, 1, &op);
5797 return;
5798 }
5799 error ("__builtin_set_thread_pointer is not supported on this target");
5800 }
5801
5802 \f
5803 /* Emit code to restore the current value of stack. */
5804
5805 static void
5806 expand_stack_restore (tree var)
5807 {
5808 rtx_insn *prev;
5809 rtx sa = expand_normal (var);
5810
5811 sa = convert_memory_address (Pmode, sa);
5812
5813 prev = get_last_insn ();
5814 emit_stack_restore (SAVE_BLOCK, sa);
5815
5816 record_new_stack_level ();
5817
5818 fixup_args_size_notes (prev, get_last_insn (), 0);
5819 }
5820
5821 /* Emit code to save the current value of stack. */
5822
5823 static rtx
5824 expand_stack_save (void)
5825 {
5826 rtx ret = NULL_RTX;
5827
5828 emit_stack_save (SAVE_BLOCK, &ret);
5829 return ret;
5830 }
5831
5832
5833 /* Expand an expression EXP that calls a built-in function,
5834 with result going to TARGET if that's convenient
5835 (and in mode MODE if that's convenient).
5836 SUBTARGET may be used as the target for computing one of EXP's operands.
5837 IGNORE is nonzero if the value is to be ignored. */
5838
5839 rtx
5840 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
5841 int ignore)
5842 {
5843 tree fndecl = get_callee_fndecl (exp);
5844 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5845 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5846 int flags;
5847
5848 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5849 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5850
5851 /* When ASan is enabled, we don't want to expand some memory/string
5852 builtins and rely on libsanitizer's hooks. This allows us to avoid
5853 redundant checks and be sure that a possible overflow will be detected
5854 by ASan. */
5855
5856 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
5857 return expand_call (exp, target, ignore);
5858
5859 /* When not optimizing, generate calls to library functions for a certain
5860 set of builtins. */
5861 if (!optimize
5862 && !called_as_built_in (fndecl)
5863 && fcode != BUILT_IN_FORK
5864 && fcode != BUILT_IN_EXECL
5865 && fcode != BUILT_IN_EXECV
5866 && fcode != BUILT_IN_EXECLP
5867 && fcode != BUILT_IN_EXECLE
5868 && fcode != BUILT_IN_EXECVP
5869 && fcode != BUILT_IN_EXECVE
5870 && fcode != BUILT_IN_ALLOCA
5871 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5872 && fcode != BUILT_IN_FREE
5873 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
5874 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
5875 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
5876 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
5877 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
5878 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
5879 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
5880 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
5881 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
5882 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
5883 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
5884 && fcode != BUILT_IN_CHKP_BNDRET)
5885 return expand_call (exp, target, ignore);
5886
5887 /* The built-in function expanders test for target == const0_rtx
5888 to determine whether the function's result will be ignored. */
5889 if (ignore)
5890 target = const0_rtx;
5891
5892 /* If the result of a pure or const built-in function is ignored, and
5893 none of its arguments are volatile, we can avoid expanding the
5894 built-in call and just evaluate the arguments for side-effects. */
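/* For example (illustrative only): a statement such as

     (void) __builtin_strlen (p);

   that survives to expansion arrives here with TARGET == const0_rtx;
   strlen is pure, so we merely evaluate P for its side effects and
   emit no call at all.  */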
5895 if (target == const0_rtx
5896 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5897 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5898 {
5899 bool volatilep = false;
5900 tree arg;
5901 call_expr_arg_iterator iter;
5902
5903 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5904 if (TREE_THIS_VOLATILE (arg))
5905 {
5906 volatilep = true;
5907 break;
5908 }
5909
5910 if (! volatilep)
5911 {
5912 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5913 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5914 return const0_rtx;
5915 }
5916 }
5917
5918 /* expand_builtin_with_bounds is supposed to be used for
5919 instrumented builtin calls. */
5920 gcc_assert (!CALL_WITH_BOUNDS_P (exp));
5921
5922 switch (fcode)
5923 {
5924 CASE_FLT_FN (BUILT_IN_FABS):
5925 case BUILT_IN_FABSD32:
5926 case BUILT_IN_FABSD64:
5927 case BUILT_IN_FABSD128:
5928 target = expand_builtin_fabs (exp, target, subtarget);
5929 if (target)
5930 return target;
5931 break;
5932
5933 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5934 target = expand_builtin_copysign (exp, target, subtarget);
5935 if (target)
5936 return target;
5937 break;
5938
5939 /* Just do a normal library call if we were unable to fold
5940 the values. */
5941 CASE_FLT_FN (BUILT_IN_CABS):
5942 break;
5943
5944 CASE_FLT_FN (BUILT_IN_EXP):
5945 CASE_FLT_FN (BUILT_IN_EXP10):
5946 CASE_FLT_FN (BUILT_IN_POW10):
5947 CASE_FLT_FN (BUILT_IN_EXP2):
5948 CASE_FLT_FN (BUILT_IN_EXPM1):
5949 CASE_FLT_FN (BUILT_IN_LOGB):
5950 CASE_FLT_FN (BUILT_IN_LOG):
5951 CASE_FLT_FN (BUILT_IN_LOG10):
5952 CASE_FLT_FN (BUILT_IN_LOG2):
5953 CASE_FLT_FN (BUILT_IN_LOG1P):
5954 CASE_FLT_FN (BUILT_IN_TAN):
5955 CASE_FLT_FN (BUILT_IN_ASIN):
5956 CASE_FLT_FN (BUILT_IN_ACOS):
5957 CASE_FLT_FN (BUILT_IN_ATAN):
5958 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5959 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5960 because of possible accuracy problems. */
5961 if (! flag_unsafe_math_optimizations)
5962 break;
5963 CASE_FLT_FN (BUILT_IN_SQRT):
5964 CASE_FLT_FN (BUILT_IN_FLOOR):
5965 CASE_FLT_FN (BUILT_IN_CEIL):
5966 CASE_FLT_FN (BUILT_IN_TRUNC):
5967 CASE_FLT_FN (BUILT_IN_ROUND):
5968 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5969 CASE_FLT_FN (BUILT_IN_RINT):
5970 target = expand_builtin_mathfn (exp, target, subtarget);
5971 if (target)
5972 return target;
5973 break;
5974
5975 CASE_FLT_FN (BUILT_IN_FMA):
5976 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
5977 if (target)
5978 return target;
5979 break;
5980
5981 CASE_FLT_FN (BUILT_IN_ILOGB):
5982 if (! flag_unsafe_math_optimizations)
5983 break;
5984 CASE_FLT_FN (BUILT_IN_ISINF):
5985 CASE_FLT_FN (BUILT_IN_FINITE):
5986 case BUILT_IN_ISFINITE:
5987 case BUILT_IN_ISNORMAL:
5988 target = expand_builtin_interclass_mathfn (exp, target);
5989 if (target)
5990 return target;
5991 break;
5992
5993 CASE_FLT_FN (BUILT_IN_ICEIL):
5994 CASE_FLT_FN (BUILT_IN_LCEIL):
5995 CASE_FLT_FN (BUILT_IN_LLCEIL):
5996 CASE_FLT_FN (BUILT_IN_LFLOOR):
5997 CASE_FLT_FN (BUILT_IN_IFLOOR):
5998 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5999 target = expand_builtin_int_roundingfn (exp, target);
6000 if (target)
6001 return target;
6002 break;
6003
6004 CASE_FLT_FN (BUILT_IN_IRINT):
6005 CASE_FLT_FN (BUILT_IN_LRINT):
6006 CASE_FLT_FN (BUILT_IN_LLRINT):
6007 CASE_FLT_FN (BUILT_IN_IROUND):
6008 CASE_FLT_FN (BUILT_IN_LROUND):
6009 CASE_FLT_FN (BUILT_IN_LLROUND):
6010 target = expand_builtin_int_roundingfn_2 (exp, target);
6011 if (target)
6012 return target;
6013 break;
6014
6015 CASE_FLT_FN (BUILT_IN_POWI):
6016 target = expand_builtin_powi (exp, target);
6017 if (target)
6018 return target;
6019 break;
6020
6021 CASE_FLT_FN (BUILT_IN_ATAN2):
6022 CASE_FLT_FN (BUILT_IN_LDEXP):
6023 CASE_FLT_FN (BUILT_IN_SCALB):
6024 CASE_FLT_FN (BUILT_IN_SCALBN):
6025 CASE_FLT_FN (BUILT_IN_SCALBLN):
6026 if (! flag_unsafe_math_optimizations)
6027 break;
6028
6029 CASE_FLT_FN (BUILT_IN_FMOD):
6030 CASE_FLT_FN (BUILT_IN_REMAINDER):
6031 CASE_FLT_FN (BUILT_IN_DREM):
6032 CASE_FLT_FN (BUILT_IN_POW):
6033 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6034 if (target)
6035 return target;
6036 break;
6037
6038 CASE_FLT_FN (BUILT_IN_CEXPI):
6039 target = expand_builtin_cexpi (exp, target);
6040 gcc_assert (target);
6041 return target;
6042
6043 CASE_FLT_FN (BUILT_IN_SIN):
6044 CASE_FLT_FN (BUILT_IN_COS):
6045 if (! flag_unsafe_math_optimizations)
6046 break;
6047 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6048 if (target)
6049 return target;
6050 break;
6051
6052 CASE_FLT_FN (BUILT_IN_SINCOS):
6053 if (! flag_unsafe_math_optimizations)
6054 break;
6055 target = expand_builtin_sincos (exp);
6056 if (target)
6057 return target;
6058 break;
6059
6060 case BUILT_IN_APPLY_ARGS:
6061 return expand_builtin_apply_args ();
6062
6063 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6064 FUNCTION with a copy of the parameters described by
6065 ARGUMENTS and ARGSIZE. It returns a block of memory
6066 allocated on the stack into which is stored all the registers
6067 that might possibly be used for returning the result of a
6068 function. ARGUMENTS is the value returned by
6069 __builtin_apply_args. ARGSIZE is the number of bytes of
6070 arguments that must be copied. ??? How should this value be
6071 computed? We'll also need a safe worst case value for varargs
6072 functions. */
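/* An illustrative (and deliberately simplified) forwarding wrapper
   built on these three builtins -- a sketch, not part of this file:

     void *wrapper (void)
     {
       void *args = __builtin_apply_args ();
       void *ret = __builtin_apply ((void (*) ()) target_fn, args, 64);
       __builtin_return (ret);
     }

   where TARGET_FN is a hypothetical function and 64 is a guessed
   worst-case argument size, cf. the ??? above.  */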
6073 case BUILT_IN_APPLY:
6074 if (!validate_arglist (exp, POINTER_TYPE,
6075 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6076 && !validate_arglist (exp, REFERENCE_TYPE,
6077 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6078 return const0_rtx;
6079 else
6080 {
6081 rtx ops[3];
6082
6083 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6084 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6085 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6086
6087 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6088 }
6089
6090 /* __builtin_return (RESULT) causes the function to return the
6091 value described by RESULT. RESULT is the address of the block of
6092 memory returned by __builtin_apply. */
6093 case BUILT_IN_RETURN:
6094 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6095 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6096 return const0_rtx;
6097
6098 case BUILT_IN_SAVEREGS:
6099 return expand_builtin_saveregs ();
6100
6101 case BUILT_IN_VA_ARG_PACK:
6102 /* All valid uses of __builtin_va_arg_pack () are removed during
6103 inlining. */
6104 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6105 return const0_rtx;
6106
6107 case BUILT_IN_VA_ARG_PACK_LEN:
6108 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6109 inlining. */
6110 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6111 return const0_rtx;
6112
6113 /* Return the address of the first anonymous stack arg. */
6114 case BUILT_IN_NEXT_ARG:
6115 if (fold_builtin_next_arg (exp, false))
6116 return const0_rtx;
6117 return expand_builtin_next_arg ();
6118
6119 case BUILT_IN_CLEAR_CACHE:
6120 target = expand_builtin___clear_cache (exp);
6121 if (target)
6122 return target;
6123 break;
6124
6125 case BUILT_IN_CLASSIFY_TYPE:
6126 return expand_builtin_classify_type (exp);
6127
6128 case BUILT_IN_CONSTANT_P:
6129 return const0_rtx;
6130
6131 case BUILT_IN_FRAME_ADDRESS:
6132 case BUILT_IN_RETURN_ADDRESS:
6133 return expand_builtin_frame_address (fndecl, exp);
6134
6135 /* Returns the address of the area where the structure is returned,
6136 or 0 otherwise. */
6137 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6138 if (call_expr_nargs (exp) != 0
6139 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6140 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6141 return const0_rtx;
6142 else
6143 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6144
6145 case BUILT_IN_ALLOCA:
6146 case BUILT_IN_ALLOCA_WITH_ALIGN:
6147 /* If the allocation stems from the declaration of a variable-sized
6148 object, it cannot accumulate. */
6149 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6150 if (target)
6151 return target;
6152 break;
6153
6154 case BUILT_IN_STACK_SAVE:
6155 return expand_stack_save ();
6156
6157 case BUILT_IN_STACK_RESTORE:
6158 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6159 return const0_rtx;
6160
6161 case BUILT_IN_BSWAP16:
6162 case BUILT_IN_BSWAP32:
6163 case BUILT_IN_BSWAP64:
6164 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6165 if (target)
6166 return target;
6167 break;
6168
6169 CASE_INT_FN (BUILT_IN_FFS):
6170 target = expand_builtin_unop (target_mode, exp, target,
6171 subtarget, ffs_optab);
6172 if (target)
6173 return target;
6174 break;
6175
6176 CASE_INT_FN (BUILT_IN_CLZ):
6177 target = expand_builtin_unop (target_mode, exp, target,
6178 subtarget, clz_optab);
6179 if (target)
6180 return target;
6181 break;
6182
6183 CASE_INT_FN (BUILT_IN_CTZ):
6184 target = expand_builtin_unop (target_mode, exp, target,
6185 subtarget, ctz_optab);
6186 if (target)
6187 return target;
6188 break;
6189
6190 CASE_INT_FN (BUILT_IN_CLRSB):
6191 target = expand_builtin_unop (target_mode, exp, target,
6192 subtarget, clrsb_optab);
6193 if (target)
6194 return target;
6195 break;
6196
6197 CASE_INT_FN (BUILT_IN_POPCOUNT):
6198 target = expand_builtin_unop (target_mode, exp, target,
6199 subtarget, popcount_optab);
6200 if (target)
6201 return target;
6202 break;
6203
6204 CASE_INT_FN (BUILT_IN_PARITY):
6205 target = expand_builtin_unop (target_mode, exp, target,
6206 subtarget, parity_optab);
6207 if (target)
6208 return target;
6209 break;
6210
6211 case BUILT_IN_STRLEN:
6212 target = expand_builtin_strlen (exp, target, target_mode);
6213 if (target)
6214 return target;
6215 break;
6216
6217 case BUILT_IN_STRCPY:
6218 target = expand_builtin_strcpy (exp, target);
6219 if (target)
6220 return target;
6221 break;
6222
6223 case BUILT_IN_STRNCPY:
6224 target = expand_builtin_strncpy (exp, target);
6225 if (target)
6226 return target;
6227 break;
6228
6229 case BUILT_IN_STPCPY:
6230 target = expand_builtin_stpcpy (exp, target, mode);
6231 if (target)
6232 return target;
6233 break;
6234
6235 case BUILT_IN_MEMCPY:
6236 target = expand_builtin_memcpy (exp, target);
6237 if (target)
6238 return target;
6239 break;
6240
6241 case BUILT_IN_MEMPCPY:
6242 target = expand_builtin_mempcpy (exp, target, mode);
6243 if (target)
6244 return target;
6245 break;
6246
6247 case BUILT_IN_MEMSET:
6248 target = expand_builtin_memset (exp, target, mode);
6249 if (target)
6250 return target;
6251 break;
6252
6253 case BUILT_IN_BZERO:
6254 target = expand_builtin_bzero (exp);
6255 if (target)
6256 return target;
6257 break;
6258
6259 case BUILT_IN_STRCMP:
6260 target = expand_builtin_strcmp (exp, target);
6261 if (target)
6262 return target;
6263 break;
6264
6265 case BUILT_IN_STRNCMP:
6266 target = expand_builtin_strncmp (exp, target, mode);
6267 if (target)
6268 return target;
6269 break;
6270
6271 case BUILT_IN_BCMP:
6272 case BUILT_IN_MEMCMP:
6273 target = expand_builtin_memcmp (exp, target);
6274 if (target)
6275 return target;
6276 break;
6277
6278 case BUILT_IN_SETJMP:
6279 /* This should have been lowered to the builtins below. */
6280 gcc_unreachable ();
6281
6282 case BUILT_IN_SETJMP_SETUP:
6283 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6284 and the receiver label. */
6285 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6286 {
6287 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6288 VOIDmode, EXPAND_NORMAL);
6289 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6290 rtx_insn *label_r = label_rtx (label);
6291
6292 /* This is copied from the handling of non-local gotos. */
6293 expand_builtin_setjmp_setup (buf_addr, label_r);
6294 nonlocal_goto_handler_labels
6295 = gen_rtx_INSN_LIST (VOIDmode, label_r,
6296 nonlocal_goto_handler_labels);
6297 /* ??? Do not let expand_label treat us as such since we would
6298 not want to be both on the list of non-local labels and on
6299 the list of forced labels. */
6300 FORCED_LABEL (label) = 0;
6301 return const0_rtx;
6302 }
6303 break;
6304
6305 case BUILT_IN_SETJMP_RECEIVER:
6306 /* __builtin_setjmp_receiver is passed the receiver label. */
6307 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6308 {
6309 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6310 rtx_insn *label_r = label_rtx (label);
6311
6312 expand_builtin_setjmp_receiver (label_r);
6313 return const0_rtx;
6314 }
6315 break;
6316
6317 /* __builtin_longjmp is passed a pointer to an array of five words.
6318 It's similar to the C library longjmp function but works with
6319 __builtin_setjmp above. */
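/* Illustrative pairing (a sketch, not part of this file):

     intptr_t buf[5];
     if (__builtin_setjmp (buf) == 0)
       ...normal path, which may call __builtin_longjmp (buf, 1)...
     else
       ...resumed here; note the second argument must be 1,
          as enforced below...  */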
6320 case BUILT_IN_LONGJMP:
6321 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6322 {
6323 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6324 VOIDmode, EXPAND_NORMAL);
6325 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6326
6327 if (value != const1_rtx)
6328 {
6329 error ("%<__builtin_longjmp%> second argument must be 1");
6330 return const0_rtx;
6331 }
6332
6333 expand_builtin_longjmp (buf_addr, value);
6334 return const0_rtx;
6335 }
6336 break;
6337
6338 case BUILT_IN_NONLOCAL_GOTO:
6339 target = expand_builtin_nonlocal_goto (exp);
6340 if (target)
6341 return target;
6342 break;
6343
6344 /* This updates the setjmp buffer that is its argument with the value
6345 of the current stack pointer. */
6346 case BUILT_IN_UPDATE_SETJMP_BUF:
6347 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6348 {
6349 rtx buf_addr
6350 = expand_normal (CALL_EXPR_ARG (exp, 0));
6351
6352 expand_builtin_update_setjmp_buf (buf_addr);
6353 return const0_rtx;
6354 }
6355 break;
6356
6357 case BUILT_IN_TRAP:
6358 expand_builtin_trap ();
6359 return const0_rtx;
6360
6361 case BUILT_IN_UNREACHABLE:
6362 expand_builtin_unreachable ();
6363 return const0_rtx;
6364
6365 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6366 case BUILT_IN_SIGNBITD32:
6367 case BUILT_IN_SIGNBITD64:
6368 case BUILT_IN_SIGNBITD128:
6369 target = expand_builtin_signbit (exp, target);
6370 if (target)
6371 return target;
6372 break;
6373
6374 /* Various hooks for the DWARF 2 __throw routine. */
6375 case BUILT_IN_UNWIND_INIT:
6376 expand_builtin_unwind_init ();
6377 return const0_rtx;
6378 case BUILT_IN_DWARF_CFA:
6379 return virtual_cfa_rtx;
6380 #ifdef DWARF2_UNWIND_INFO
6381 case BUILT_IN_DWARF_SP_COLUMN:
6382 return expand_builtin_dwarf_sp_column ();
6383 case BUILT_IN_INIT_DWARF_REG_SIZES:
6384 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6385 return const0_rtx;
6386 #endif
6387 case BUILT_IN_FROB_RETURN_ADDR:
6388 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6389 case BUILT_IN_EXTRACT_RETURN_ADDR:
6390 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6391 case BUILT_IN_EH_RETURN:
6392 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6393 CALL_EXPR_ARG (exp, 1));
6394 return const0_rtx;
6395 case BUILT_IN_EH_RETURN_DATA_REGNO:
6396 return expand_builtin_eh_return_data_regno (exp);
6397 case BUILT_IN_EXTEND_POINTER:
6398 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6399 case BUILT_IN_EH_POINTER:
6400 return expand_builtin_eh_pointer (exp);
6401 case BUILT_IN_EH_FILTER:
6402 return expand_builtin_eh_filter (exp);
6403 case BUILT_IN_EH_COPY_VALUES:
6404 return expand_builtin_eh_copy_values (exp);
6405
6406 case BUILT_IN_VA_START:
6407 return expand_builtin_va_start (exp);
6408 case BUILT_IN_VA_END:
6409 return expand_builtin_va_end (exp);
6410 case BUILT_IN_VA_COPY:
6411 return expand_builtin_va_copy (exp);
6412 case BUILT_IN_EXPECT:
6413 return expand_builtin_expect (exp, target);
6414 case BUILT_IN_ASSUME_ALIGNED:
6415 return expand_builtin_assume_aligned (exp, target);
6416 case BUILT_IN_PREFETCH:
6417 expand_builtin_prefetch (exp);
6418 return const0_rtx;
6419
6420 case BUILT_IN_INIT_TRAMPOLINE:
6421 return expand_builtin_init_trampoline (exp, true);
6422 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6423 return expand_builtin_init_trampoline (exp, false);
6424 case BUILT_IN_ADJUST_TRAMPOLINE:
6425 return expand_builtin_adjust_trampoline (exp);
6426
6427 case BUILT_IN_FORK:
6428 case BUILT_IN_EXECL:
6429 case BUILT_IN_EXECV:
6430 case BUILT_IN_EXECLP:
6431 case BUILT_IN_EXECLE:
6432 case BUILT_IN_EXECVP:
6433 case BUILT_IN_EXECVE:
6434 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6435 if (target)
6436 return target;
6437 break;
6438
6439 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6440 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6441 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6442 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6443 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6444 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6445 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6446 if (target)
6447 return target;
6448 break;
6449
6450 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6451 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6452 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6453 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6454 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6455 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6456 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6457 if (target)
6458 return target;
6459 break;
6460
6461 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6462 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6463 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6464 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6465 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6466 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6467 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6468 if (target)
6469 return target;
6470 break;
6471
6472 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6473 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6474 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6475 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6476 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6477 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6478 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6479 if (target)
6480 return target;
6481 break;
6482
6483 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6484 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6485 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6486 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6487 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6488 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6489 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6490 if (target)
6491 return target;
6492 break;
6493
6494 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6495 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6496 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6497 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6498 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6499 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6500 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6501 if (target)
6502 return target;
6503 break;
6504
6505 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6506 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6507 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6508 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6509 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6510 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6511 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6512 if (target)
6513 return target;
6514 break;
6515
6516 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6517 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6518 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6519 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6520 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6521 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6522 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6523 if (target)
6524 return target;
6525 break;
6526
6527 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6528 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6529 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6530 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6531 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6532 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6533 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6534 if (target)
6535 return target;
6536 break;
6537
6538 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6539 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6540 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6541 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6542 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6543 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6544 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6545 if (target)
6546 return target;
6547 break;
6548
6549 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6550 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6551 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6552 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6553 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6554 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6555 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6556 if (target)
6557 return target;
6558 break;
6559
6560 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6561 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6562 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6563 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6564 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6565 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6566 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6567 if (target)
6568 return target;
6569 break;
6570
6571 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6572 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6573 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6574 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6575 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6576 if (mode == VOIDmode)
6577 mode = TYPE_MODE (boolean_type_node);
6578 if (!target || !register_operand (target, mode))
6579 target = gen_reg_rtx (mode);
6580
6581 mode = get_builtin_sync_mode
6582 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6583 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6584 if (target)
6585 return target;
6586 break;
6587
6588 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6589 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6590 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6591 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6592 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6593 mode = get_builtin_sync_mode
6594 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6595 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6596 if (target)
6597 return target;
6598 break;
6599
6600 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6601 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6602 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6603 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6604 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6605 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6606 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6607 if (target)
6608 return target;
6609 break;
6610
6611 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6612 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6613 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6614 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6615 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6616 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6617 expand_builtin_sync_lock_release (mode, exp);
6618 return const0_rtx;
6619
6620 case BUILT_IN_SYNC_SYNCHRONIZE:
6621 expand_builtin_sync_synchronize ();
6622 return const0_rtx;
6623
6624 case BUILT_IN_ATOMIC_EXCHANGE_1:
6625 case BUILT_IN_ATOMIC_EXCHANGE_2:
6626 case BUILT_IN_ATOMIC_EXCHANGE_4:
6627 case BUILT_IN_ATOMIC_EXCHANGE_8:
6628 case BUILT_IN_ATOMIC_EXCHANGE_16:
6629 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6630 target = expand_builtin_atomic_exchange (mode, exp, target);
6631 if (target)
6632 return target;
6633 break;
6634
6635 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6636 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6637 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6638 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6639 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6640 {
6641 unsigned int nargs, z;
6642 vec<tree, va_gc> *vec;
6643
6644 mode =
6645 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6646 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6647 if (target)
6648 return target;
6649
6650 /* If this is turned into an external library call, the weak parameter
6651 must be dropped to match the expected parameter list. */
6652 nargs = call_expr_nargs (exp);
6653 vec_alloc (vec, nargs - 1);
6654 for (z = 0; z < 3; z++)
6655 vec->quick_push (CALL_EXPR_ARG (exp, z));
6656 /* Skip the boolean weak parameter. */
6657 for (z = 4; z < 6; z++)
6658 vec->quick_push (CALL_EXPR_ARG (exp, z));
6659 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6660 break;
6661 }
6662
6663 case BUILT_IN_ATOMIC_LOAD_1:
6664 case BUILT_IN_ATOMIC_LOAD_2:
6665 case BUILT_IN_ATOMIC_LOAD_4:
6666 case BUILT_IN_ATOMIC_LOAD_8:
6667 case BUILT_IN_ATOMIC_LOAD_16:
6668 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6669 target = expand_builtin_atomic_load (mode, exp, target);
6670 if (target)
6671 return target;
6672 break;
6673
6674 case BUILT_IN_ATOMIC_STORE_1:
6675 case BUILT_IN_ATOMIC_STORE_2:
6676 case BUILT_IN_ATOMIC_STORE_4:
6677 case BUILT_IN_ATOMIC_STORE_8:
6678 case BUILT_IN_ATOMIC_STORE_16:
6679 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6680 target = expand_builtin_atomic_store (mode, exp);
6681 if (target)
6682 return const0_rtx;
6683 break;
6684
6685 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6686 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6687 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6688 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6689 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6690 {
6691 enum built_in_function lib;
6692 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6693 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6694 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6695 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6696 ignore, lib);
6697 if (target)
6698 return target;
6699 break;
6700 }
6701 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6702 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6703 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6704 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6705 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6706 {
6707 enum built_in_function lib;
6708 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6709 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6710 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6711 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6712 ignore, lib);
6713 if (target)
6714 return target;
6715 break;
6716 }
6717 case BUILT_IN_ATOMIC_AND_FETCH_1:
6718 case BUILT_IN_ATOMIC_AND_FETCH_2:
6719 case BUILT_IN_ATOMIC_AND_FETCH_4:
6720 case BUILT_IN_ATOMIC_AND_FETCH_8:
6721 case BUILT_IN_ATOMIC_AND_FETCH_16:
6722 {
6723 enum built_in_function lib;
6724 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6725 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6726 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6727 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6728 ignore, lib);
6729 if (target)
6730 return target;
6731 break;
6732 }
6733 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6734 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6735 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6736 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6737 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6738 {
6739 enum built_in_function lib;
6740 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6741 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6742 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6743 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6744 ignore, lib);
6745 if (target)
6746 return target;
6747 break;
6748 }
6749 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6750 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6751 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6752 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6753 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6754 {
6755 enum built_in_function lib;
6756 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6757 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6758 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6759 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6760 ignore, lib);
6761 if (target)
6762 return target;
6763 break;
6764 }
6765 case BUILT_IN_ATOMIC_OR_FETCH_1:
6766 case BUILT_IN_ATOMIC_OR_FETCH_2:
6767 case BUILT_IN_ATOMIC_OR_FETCH_4:
6768 case BUILT_IN_ATOMIC_OR_FETCH_8:
6769 case BUILT_IN_ATOMIC_OR_FETCH_16:
6770 {
6771 enum built_in_function lib;
6772 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6773 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6774 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6775 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6776 ignore, lib);
6777 if (target)
6778 return target;
6779 break;
6780 }
6781 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6782 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6783 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6784 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6785 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6786 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6787 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6788 ignore, BUILT_IN_NONE);
6789 if (target)
6790 return target;
6791 break;
6792
6793 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6794 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6795 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6796 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6797 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6798 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6799 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6800 ignore, BUILT_IN_NONE);
6801 if (target)
6802 return target;
6803 break;
6804
6805 case BUILT_IN_ATOMIC_FETCH_AND_1:
6806 case BUILT_IN_ATOMIC_FETCH_AND_2:
6807 case BUILT_IN_ATOMIC_FETCH_AND_4:
6808 case BUILT_IN_ATOMIC_FETCH_AND_8:
6809 case BUILT_IN_ATOMIC_FETCH_AND_16:
6810 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6811 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6812 ignore, BUILT_IN_NONE);
6813 if (target)
6814 return target;
6815 break;
6816
6817 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6818 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6819 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6820 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6821 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6822 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6823 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6824 ignore, BUILT_IN_NONE);
6825 if (target)
6826 return target;
6827 break;
6828
6829 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6830 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6831 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6832 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6833 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6834 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6835 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6836 ignore, BUILT_IN_NONE);
6837 if (target)
6838 return target;
6839 break;
6840
6841 case BUILT_IN_ATOMIC_FETCH_OR_1:
6842 case BUILT_IN_ATOMIC_FETCH_OR_2:
6843 case BUILT_IN_ATOMIC_FETCH_OR_4:
6844 case BUILT_IN_ATOMIC_FETCH_OR_8:
6845 case BUILT_IN_ATOMIC_FETCH_OR_16:
6846 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6847 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6848 ignore, BUILT_IN_NONE);
6849 if (target)
6850 return target;
6851 break;
6852
6853 case BUILT_IN_ATOMIC_TEST_AND_SET:
6854 return expand_builtin_atomic_test_and_set (exp, target);
6855
6856 case BUILT_IN_ATOMIC_CLEAR:
6857 return expand_builtin_atomic_clear (exp);
6858
6859 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6860 return expand_builtin_atomic_always_lock_free (exp);
6861
6862 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6863 target = expand_builtin_atomic_is_lock_free (exp);
6864 if (target)
6865 return target;
6866 break;
6867
6868 case BUILT_IN_ATOMIC_THREAD_FENCE:
6869 expand_builtin_atomic_thread_fence (exp);
6870 return const0_rtx;
6871
6872 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6873 expand_builtin_atomic_signal_fence (exp);
6874 return const0_rtx;
6875
6876 case BUILT_IN_OBJECT_SIZE:
6877 return expand_builtin_object_size (exp);
6878
6879 case BUILT_IN_MEMCPY_CHK:
6880 case BUILT_IN_MEMPCPY_CHK:
6881 case BUILT_IN_MEMMOVE_CHK:
6882 case BUILT_IN_MEMSET_CHK:
6883 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6884 if (target)
6885 return target;
6886 break;
6887
6888 case BUILT_IN_STRCPY_CHK:
6889 case BUILT_IN_STPCPY_CHK:
6890 case BUILT_IN_STRNCPY_CHK:
6891 case BUILT_IN_STPNCPY_CHK:
6892 case BUILT_IN_STRCAT_CHK:
6893 case BUILT_IN_STRNCAT_CHK:
6894 case BUILT_IN_SNPRINTF_CHK:
6895 case BUILT_IN_VSNPRINTF_CHK:
6896 maybe_emit_chk_warning (exp, fcode);
6897 break;
6898
6899 case BUILT_IN_SPRINTF_CHK:
6900 case BUILT_IN_VSPRINTF_CHK:
6901 maybe_emit_sprintf_chk_warning (exp, fcode);
6902 break;
6903
6904 case BUILT_IN_FREE:
6905 if (warn_free_nonheap_object)
6906 maybe_emit_free_warning (exp);
6907 break;
6908
6909 case BUILT_IN_THREAD_POINTER:
6910 return expand_builtin_thread_pointer (exp, target);
6911
6912 case BUILT_IN_SET_THREAD_POINTER:
6913 expand_builtin_set_thread_pointer (exp);
6914 return const0_rtx;
6915
6916 case BUILT_IN_CILK_DETACH:
6917 expand_builtin_cilk_detach (exp);
6918 return const0_rtx;
6919
6920 case BUILT_IN_CILK_POP_FRAME:
6921 expand_builtin_cilk_pop_frame (exp);
6922 return const0_rtx;
6923
6924 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
6925 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
6926 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
6927 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
6928 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
6929 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
6930 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
6931 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
6932 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
6933 case BUILT_IN_CHKP_GET_PTR_LBOUND:
6934 case BUILT_IN_CHKP_GET_PTR_UBOUND:
6935 /* We allow user CHKP builtins if Pointer Bounds
6936 Checker is off. */
6937 if (!chkp_function_instrumented_p (current_function_decl))
6938 {
6939 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
6940 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
6941 || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
6942 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
6943 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
6944 return expand_normal (CALL_EXPR_ARG (exp, 0));
6945 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
6946 return expand_normal (size_zero_node);
6947 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
6948 return expand_normal (size_int (-1));
6949 else
6950 return const0_rtx;
6951 }
6952 /* FALLTHROUGH */
6953
6954 case BUILT_IN_CHKP_BNDMK:
6955 case BUILT_IN_CHKP_BNDSTX:
6956 case BUILT_IN_CHKP_BNDCL:
6957 case BUILT_IN_CHKP_BNDCU:
6958 case BUILT_IN_CHKP_BNDLDX:
6959 case BUILT_IN_CHKP_BNDRET:
6960 case BUILT_IN_CHKP_INTERSECT:
6961 case BUILT_IN_CHKP_NARROW:
6962 case BUILT_IN_CHKP_EXTRACT_LOWER:
6963 case BUILT_IN_CHKP_EXTRACT_UPPER:
6964 /* Software implementation of Pointer Bounds Checker is NYI.
6965 Target support is required. */
6966 error ("Your target platform does not support -fcheck-pointer-bounds");
6967 break;
6968
6969 case BUILT_IN_ACC_ON_DEVICE:
6970 	      /* Do the library call if we failed to expand the builtin when
6971 		 folding.  */
6972 break;
6973
6974 default: /* just do library call, if unknown builtin */
6975 break;
6976 }
6977
6978 /* The switch statement above can drop through to cause the function
6979 to be called normally. */
6980 return expand_call (exp, target, ignore);
6981 }
6982
6983 /* Similar to expand_builtin but is used for instrumented calls. */
6984
6985 rtx
6986 expand_builtin_with_bounds (tree exp, rtx target,
6987 rtx subtarget ATTRIBUTE_UNUSED,
6988 machine_mode mode, int ignore)
6989 {
6990 tree fndecl = get_callee_fndecl (exp);
6991 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6992
6993 gcc_assert (CALL_WITH_BOUNDS_P (exp));
6994
6995 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6996 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6997
6998 gcc_assert (fcode > BEGIN_CHKP_BUILTINS
6999 && fcode < END_CHKP_BUILTINS);
7000
7001 switch (fcode)
7002 {
7003 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
7004 target = expand_builtin_memcpy_with_bounds (exp, target);
7005 if (target)
7006 return target;
7007 break;
7008
7009 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
7010 target = expand_builtin_mempcpy_with_bounds (exp, target, mode);
7011 if (target)
7012 return target;
7013 break;
7014
7015 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
7016 target = expand_builtin_memset_with_bounds (exp, target, mode);
7017 if (target)
7018 return target;
7019 break;
7020
7021 default:
7022 break;
7023 }
7024
7025 /* The switch statement above can drop through to cause the function
7026 to be called normally. */
7027 return expand_call (exp, target, ignore);
7028 }
7029
7030 /* Determine whether a tree node represents a call to a built-in
7031 function. If the tree T is a call to a built-in function with
7032 the right number of arguments of the appropriate types, return
7033 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7034 Otherwise the return value is END_BUILTINS. */
7035
7036 enum built_in_function
7037 builtin_mathfn_code (const_tree t)
7038 {
7039 const_tree fndecl, arg, parmlist;
7040 const_tree argtype, parmtype;
7041 const_call_expr_arg_iterator iter;
7042
7043 if (TREE_CODE (t) != CALL_EXPR
7044 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7045 return END_BUILTINS;
7046
7047 fndecl = get_callee_fndecl (t);
7048 if (fndecl == NULL_TREE
7049 || TREE_CODE (fndecl) != FUNCTION_DECL
7050 || ! DECL_BUILT_IN (fndecl)
7051 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7052 return END_BUILTINS;
7053
7054 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7055 init_const_call_expr_arg_iterator (t, &iter);
7056 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7057 {
7058 /* If a function doesn't take a variable number of arguments,
7059 the last element in the list will have type `void'. */
7060 parmtype = TREE_VALUE (parmlist);
7061 if (VOID_TYPE_P (parmtype))
7062 {
7063 if (more_const_call_expr_args_p (&iter))
7064 return END_BUILTINS;
7065 return DECL_FUNCTION_CODE (fndecl);
7066 }
7067
7068 if (! more_const_call_expr_args_p (&iter))
7069 return END_BUILTINS;
7070
7071 arg = next_const_call_expr_arg (&iter);
7072 argtype = TREE_TYPE (arg);
7073
7074 if (SCALAR_FLOAT_TYPE_P (parmtype))
7075 {
7076 if (! SCALAR_FLOAT_TYPE_P (argtype))
7077 return END_BUILTINS;
7078 }
7079 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7080 {
7081 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7082 return END_BUILTINS;
7083 }
7084 else if (POINTER_TYPE_P (parmtype))
7085 {
7086 if (! POINTER_TYPE_P (argtype))
7087 return END_BUILTINS;
7088 }
7089 else if (INTEGRAL_TYPE_P (parmtype))
7090 {
7091 if (! INTEGRAL_TYPE_P (argtype))
7092 return END_BUILTINS;
7093 }
7094 else
7095 return END_BUILTINS;
7096 }
7097
7098 /* Variable-length argument list. */
7099 return DECL_FUNCTION_CODE (fndecl);
7100 }
7101
7102 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7103 evaluate to a constant. */
7104
7105 static tree
7106 fold_builtin_constant_p (tree arg)
7107 {
7108 /* We return 1 for a numeric type that's known to be a constant
7109 value at compile-time or for an aggregate type that's a
7110 literal constant. */
7111 STRIP_NOPS (arg);
7112
7113   /* If we know this is a constant, return the constant one.  */
7114 if (CONSTANT_CLASS_P (arg)
7115 || (TREE_CODE (arg) == CONSTRUCTOR
7116 && TREE_CONSTANT (arg)))
7117 return integer_one_node;
7118 if (TREE_CODE (arg) == ADDR_EXPR)
7119 {
7120 tree op = TREE_OPERAND (arg, 0);
7121 if (TREE_CODE (op) == STRING_CST
7122 || (TREE_CODE (op) == ARRAY_REF
7123 && integer_zerop (TREE_OPERAND (op, 1))
7124 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7125 return integer_one_node;
7126 }
7127
7128 /* If this expression has side effects, show we don't know it to be a
7129      constant.  Likewise if it's a pointer or aggregate type, since in
7130      those cases we only want literals; those are only optimized
7131      when generating RTL, not later.
7132 And finally, if we are compiling an initializer, not code, we
7133 need to return a definite result now; there's not going to be any
7134 more optimization done. */
7135 if (TREE_SIDE_EFFECTS (arg)
7136 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7137 || POINTER_TYPE_P (TREE_TYPE (arg))
7138 || cfun == 0
7139 || folding_initializer
7140 || force_folding_builtin_constant_p)
7141 return integer_zero_node;
7142
7143 return NULL_TREE;
7144 }
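
/* As an illustration: __builtin_constant_p (3 * 7) folds to 1 here
   because the argument is already a constant, while a call whose
   argument has side effects, e.g. __builtin_constant_p (x++), folds
   to 0.  A plain variable reference yields NULL_TREE, deferring the
   answer to later passes (except at -O0, where the caller folds it
   to 0 right away).  */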
7145
7146 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7147 return it as a truthvalue. */
7148
7149 static tree
7150 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7151 tree predictor)
7152 {
7153 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7154
7155 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7156 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7157 ret_type = TREE_TYPE (TREE_TYPE (fn));
7158 pred_type = TREE_VALUE (arg_types);
7159 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7160
7161 pred = fold_convert_loc (loc, pred_type, pred);
7162 expected = fold_convert_loc (loc, expected_type, expected);
7163 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7164 predictor);
7165
7166 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7167 build_int_cst (ret_type, 0));
7168 }
7169
7170 /* Fold a call to builtin_expect with arguments ARG0, ARG1 and ARG2.
7171    Return NULL_TREE if no simplification is possible.  */
7172
7173 tree
7174 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
7175 {
7176 tree inner, fndecl, inner_arg0;
7177 enum tree_code code;
7178
7179 /* Distribute the expected value over short-circuiting operators.
7180 See through the cast from truthvalue_type_node to long. */
7181 inner_arg0 = arg0;
7182 while (CONVERT_EXPR_P (inner_arg0)
7183 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7184 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7185 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7186
7187 /* If this is a builtin_expect within a builtin_expect keep the
7188 inner one. See through a comparison against a constant. It
7189    might have been added to create a truthvalue.  */
7190 inner = inner_arg0;
7191
7192 if (COMPARISON_CLASS_P (inner)
7193 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7194 inner = TREE_OPERAND (inner, 0);
7195
7196 if (TREE_CODE (inner) == CALL_EXPR
7197 && (fndecl = get_callee_fndecl (inner))
7198 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7199 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7200 return arg0;
7201
7202 inner = inner_arg0;
7203 code = TREE_CODE (inner);
7204 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7205 {
7206 tree op0 = TREE_OPERAND (inner, 0);
7207 tree op1 = TREE_OPERAND (inner, 1);
7208
7209 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7210 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
7211 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7212
7213 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7214 }
7215
7216 /* If the argument isn't invariant then there's nothing else we can do. */
7217 if (!TREE_CONSTANT (inner_arg0))
7218 return NULL_TREE;
7219
7220 /* If we expect that a comparison against the argument will fold to
7221      a constant, return the constant.  In practice, this means a true
7222 constant or the address of a non-weak symbol. */
7223 inner = inner_arg0;
7224 STRIP_NOPS (inner);
7225 if (TREE_CODE (inner) == ADDR_EXPR)
7226 {
7227 do
7228 {
7229 inner = TREE_OPERAND (inner, 0);
7230 }
7231 while (TREE_CODE (inner) == COMPONENT_REF
7232 || TREE_CODE (inner) == ARRAY_REF);
7233 if ((TREE_CODE (inner) == VAR_DECL
7234 || TREE_CODE (inner) == FUNCTION_DECL)
7235 && DECL_WEAK (inner))
7236 return NULL_TREE;
7237 }
7238
7239 /* Otherwise, ARG0 already has the proper type for the return value. */
7240 return arg0;
7241 }
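
/* As an illustration of the distribution above, a call like
   __builtin_expect (a && b, 1) becomes, roughly,

     (__builtin_expect ((long) (a), 1) != 0)
     && (__builtin_expect ((long) (b), 1) != 0)

   so each short-circuited operand carries the expectation.  */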
7242
7243 /* Fold a call to __builtin_classify_type with argument ARG. */
7244
7245 static tree
7246 fold_builtin_classify_type (tree arg)
7247 {
7248 if (arg == 0)
7249 return build_int_cst (integer_type_node, no_type_class);
7250
7251 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7252 }
7253
7254 /* Fold a call to __builtin_strlen with argument ARG. */
7255
7256 static tree
7257 fold_builtin_strlen (location_t loc, tree type, tree arg)
7258 {
7259 if (!validate_arg (arg, POINTER_TYPE))
7260 return NULL_TREE;
7261 else
7262 {
7263 tree len = c_strlen (arg, 0);
7264
7265 if (len)
7266 return fold_convert_loc (loc, type, len);
7267
7268 return NULL_TREE;
7269 }
7270 }
7271
7272 /* If ARG is a foldable constant real, use FN to round it to an integer
7273 value and try to represent the result in integer type ITYPE. Return
7274 the value on success, otherwise return null. */
7275
7276 static tree
7277 do_real_to_int_conversion (tree itype, tree arg,
7278 void (*fn) (REAL_VALUE_TYPE *, machine_mode,
7279 const REAL_VALUE_TYPE *))
7280 {
7281 if (TREE_CODE (arg) != REAL_CST || TREE_OVERFLOW (arg))
7282 return NULL_TREE;
7283
7284 const REAL_VALUE_TYPE *value = TREE_REAL_CST_PTR (arg);
7285 if (!real_isfinite (value))
7286 return NULL_TREE;
7287
7288 tree ftype = TREE_TYPE (arg);
7289 REAL_VALUE_TYPE rounded;
7290 fn (&rounded, TYPE_MODE (ftype), value);
7291
7292 bool fail = false;
7293 wide_int ival = real_to_integer (&rounded, &fail, TYPE_PRECISION (itype));
7294 if (fail)
7295 return NULL_TREE;
7296
7297 return wide_int_to_tree (itype, ival);
7298 }
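
/* As an illustration, passing real_round as FN here turns the
   constant 2.5 into the integer 3 (round to nearest, ties away from
   zero), provided 3 fits in ITYPE; a NaN, infinity or out-of-range
   value makes the function return NULL_TREE instead.  */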
7299
7300
7301 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7302
7303 static tree
7304 fold_builtin_inf (location_t loc, tree type, int warn)
7305 {
7306 REAL_VALUE_TYPE real;
7307
7308 /* __builtin_inff is intended to be usable to define INFINITY on all
7309 targets. If an infinity is not available, INFINITY expands "to a
7310 positive constant of type float that overflows at translation
7311 time", footnote "In this case, using INFINITY will violate the
7312 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7313 Thus we pedwarn to ensure this constraint violation is
7314 diagnosed. */
7315 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7316 pedwarn (loc, 0, "target format does not support infinity");
7317
7318 real_inf (&real);
7319 return build_real (type, real);
7320 }
7321
7322 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7323
7324 static tree
7325 fold_builtin_nan (tree arg, tree type, int quiet)
7326 {
7327 REAL_VALUE_TYPE real;
7328 const char *str;
7329
7330 if (!validate_arg (arg, POINTER_TYPE))
7331 return NULL_TREE;
7332 str = c_getstr (arg);
7333 if (!str)
7334 return NULL_TREE;
7335
7336 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7337 return NULL_TREE;
7338
7339 return build_real (type, real);
7340 }
7341
7342 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7343 NULL_TREE if no simplification can be made. */
7344
7345 static tree
7346 fold_builtin_sincos (location_t loc,
7347 tree arg0, tree arg1, tree arg2)
7348 {
7349 tree type;
7350 tree res, fn, call;
7351
7352 if (!validate_arg (arg0, REAL_TYPE)
7353 || !validate_arg (arg1, POINTER_TYPE)
7354 || !validate_arg (arg2, POINTER_TYPE))
7355 return NULL_TREE;
7356
7357 type = TREE_TYPE (arg0);
7358
7359 /* Calculate the result when the argument is a constant. */
7360 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7361 return res;
7362
7363 /* Canonicalize sincos to cexpi. */
7364 if (!targetm.libc_has_function (function_c99_math_complex))
7365 return NULL_TREE;
7366 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7367 if (!fn)
7368 return NULL_TREE;
7369
7370 call = build_call_expr_loc (loc, fn, 1, arg0);
7371 call = builtin_save_expr (call);
7372
7373 return build2 (COMPOUND_EXPR, void_type_node,
7374 build2 (MODIFY_EXPR, void_type_node,
7375 build_fold_indirect_ref_loc (loc, arg1),
7376 build1 (IMAGPART_EXPR, type, call)),
7377 build2 (MODIFY_EXPR, void_type_node,
7378 build_fold_indirect_ref_loc (loc, arg2),
7379 build1 (REALPART_EXPR, type, call)));
7380 }
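
/* As an illustration, on a target whose libc provides the C99
   complex functions, sincos (x, &s, &c) is canonicalized roughly to

     tmp = cexpi (x), *s = __imag tmp, *c = __real tmp

   so later passes only have to optimize the single cexpi call.  */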
7381
7382 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
7383    and their long and long long variants (e.g. ffsl and ffsll).  ARG is
7384 the argument to the call. Return NULL_TREE if no simplification can
7385 be made. */
7386
7387 static tree
7388 fold_builtin_bitop (tree fndecl, tree arg)
7389 {
7390 if (!validate_arg (arg, INTEGER_TYPE))
7391 return NULL_TREE;
7392
7393 /* Optimize for constant argument. */
7394 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7395 {
7396 tree type = TREE_TYPE (arg);
7397 int result;
7398
7399 switch (DECL_FUNCTION_CODE (fndecl))
7400 {
7401 CASE_INT_FN (BUILT_IN_FFS):
7402 result = wi::ffs (arg);
7403 break;
7404
7405 CASE_INT_FN (BUILT_IN_CLZ):
7406 if (wi::ne_p (arg, 0))
7407 result = wi::clz (arg);
7408 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7409 result = TYPE_PRECISION (type);
7410 break;
7411
7412 CASE_INT_FN (BUILT_IN_CTZ):
7413 if (wi::ne_p (arg, 0))
7414 result = wi::ctz (arg);
7415 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7416 result = TYPE_PRECISION (type);
7417 break;
7418
7419 CASE_INT_FN (BUILT_IN_CLRSB):
7420 result = wi::clrsb (arg);
7421 break;
7422
7423 CASE_INT_FN (BUILT_IN_POPCOUNT):
7424 result = wi::popcount (arg);
7425 break;
7426
7427 CASE_INT_FN (BUILT_IN_PARITY):
7428 result = wi::parity (arg);
7429 break;
7430
7431 default:
7432 gcc_unreachable ();
7433 }
7434
7435 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
7436 }
7437
7438 return NULL_TREE;
7439 }
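
/* Constant-folding examples for the bit operations above:
   __builtin_popcount (0xf0) folds to 4, __builtin_ffs (0x08) to 4
   (bits are numbered from 1) and __builtin_parity (7) to 1.  For
   clz/ctz of zero the target-defined value is used when
   CLZ_DEFINED_VALUE_AT_ZERO / CTZ_DEFINED_VALUE_AT_ZERO provide
   one; otherwise the precision of the type is used.  */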
7440
7441 /* Fold function call to builtin_bswap and the short, long and long long
7442 variants. Return NULL_TREE if no simplification can be made. */
7443 static tree
7444 fold_builtin_bswap (tree fndecl, tree arg)
7445 {
7446 if (! validate_arg (arg, INTEGER_TYPE))
7447 return NULL_TREE;
7448
7449 /* Optimize constant value. */
7450 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7451 {
7452 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7453
7454 switch (DECL_FUNCTION_CODE (fndecl))
7455 {
7456 case BUILT_IN_BSWAP16:
7457 case BUILT_IN_BSWAP32:
7458 case BUILT_IN_BSWAP64:
7459 {
7460 signop sgn = TYPE_SIGN (type);
7461 tree result =
7462 wide_int_to_tree (type,
7463 wide_int::from (arg, TYPE_PRECISION (type),
7464 sgn).bswap ());
7465 return result;
7466 }
7467 default:
7468 gcc_unreachable ();
7469 }
7470 }
7471
7472 return NULL_TREE;
7473 }
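
/* As an illustration, __builtin_bswap32 (0x12345678) folds to
   0x78563412 and __builtin_bswap16 (0x1234) to 0x3412; the wide-int
   bswap above reverses the bytes at the precision of the result
   type.  */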
7474
7475 /* Fold a builtin function call to pow, powf, or powl. Return
7476 NULL_TREE if no simplification can be made. */
7477 static tree
7478 fold_const_builtin_pow (tree arg0, tree arg1, tree type)
7479 {
7480 tree res;
7481
7482 if (!validate_arg (arg0, REAL_TYPE)
7483 || !validate_arg (arg1, REAL_TYPE))
7484 return NULL_TREE;
7485
7486 /* Calculate the result when the argument is a constant. */
7487 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
7488 return res;
7489
7490 /* Check for an integer exponent. */
7491 if (TREE_CODE (arg0) == REAL_CST
7492 && !TREE_OVERFLOW (arg0)
7493 && TREE_CODE (arg1) == REAL_CST
7494 && !TREE_OVERFLOW (arg1))
7495 {
7496 REAL_VALUE_TYPE cint1;
7497 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (arg0);
7498 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (arg1);
7499 HOST_WIDE_INT n1 = real_to_integer (c1);
7500 real_from_integer (&cint1, VOIDmode, n1, SIGNED);
7501 /* Attempt to evaluate pow at compile-time, unless this should
7502 raise an exception. */
7503 if (real_identical (c1, &cint1)
7504 && (n1 > 0
7505 || (!flag_trapping_math && !flag_errno_math)
7506 || !real_equal (c0, &dconst0)))
7507 {
7508 REAL_VALUE_TYPE x;
7509 bool inexact = real_powi (&x, TYPE_MODE (type), c0, n1);
7510 if (flag_unsafe_math_optimizations || !inexact)
7511 return build_real (type, x);
7512 }
7513 }
7514
7515 return NULL_TREE;
7516 }
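
/* Constant-folding examples: pow (2.0, 10.0) folds to 1024.0
   through the integer-exponent path (real_powi), since 10.0 is
   exactly an integer and the result is exact.  pow (0.0, -1.0) is
   left alone when -ftrapping-math or -fmath-errno is in effect,
   because evaluating it at run time would raise an exception and
   set errno.  */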
7517
7518 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
7519 arguments to the call, and TYPE is its return type.
7520 Return NULL_TREE if no simplification can be made. */
7521
7522 static tree
7523 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
7524 {
7525 if (!validate_arg (arg1, POINTER_TYPE)
7526 || !validate_arg (arg2, INTEGER_TYPE)
7527 || !validate_arg (len, INTEGER_TYPE))
7528 return NULL_TREE;
7529 else
7530 {
7531 const char *p1;
7532
7533 if (TREE_CODE (arg2) != INTEGER_CST
7534 || !tree_fits_uhwi_p (len))
7535 return NULL_TREE;
7536
7537 p1 = c_getstr (arg1);
7538 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
7539 {
7540 char c;
7541 const char *r;
7542 tree tem;
7543
7544 if (target_char_cast (arg2, &c))
7545 return NULL_TREE;
7546
7547 r = (const char *) memchr (p1, c, tree_to_uhwi (len));
7548
7549 if (r == NULL)
7550 return build_int_cst (TREE_TYPE (arg1), 0);
7551
7552 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
7553 return fold_convert_loc (loc, type, tem);
7554 }
7555 return NULL_TREE;
7556 }
7557 }
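
/* As an illustration, memchr ("hello", 'l', 5) folds to the address
   of the constant string plus 2, while memchr ("hello", 'z', 5)
   folds to a null pointer of the first argument's type.  */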
7558
7559 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
7560 Return NULL_TREE if no simplification can be made. */
7561
7562 static tree
7563 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
7564 {
7565 const char *p1, *p2;
7566
7567 if (!validate_arg (arg1, POINTER_TYPE)
7568 || !validate_arg (arg2, POINTER_TYPE)
7569 || !validate_arg (len, INTEGER_TYPE))
7570 return NULL_TREE;
7571
7572 /* If the LEN parameter is zero, return zero. */
7573 if (integer_zerop (len))
7574 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
7575 arg1, arg2);
7576
7577 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7578 if (operand_equal_p (arg1, arg2, 0))
7579 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
7580
7581 p1 = c_getstr (arg1);
7582 p2 = c_getstr (arg2);
7583
7584 /* If all arguments are constant, and the value of len is not greater
7585 than the lengths of arg1 and arg2, evaluate at compile-time. */
7586 if (tree_fits_uhwi_p (len) && p1 && p2
7587 && compare_tree_int (len, strlen (p1) + 1) <= 0
7588 && compare_tree_int (len, strlen (p2) + 1) <= 0)
7589 {
7590 const int r = memcmp (p1, p2, tree_to_uhwi (len));
7591
7592 if (r > 0)
7593 return integer_one_node;
7594 else if (r < 0)
7595 return integer_minus_one_node;
7596 else
7597 return integer_zero_node;
7598 }
7599
7600   /* If the LEN parameter is one, return an expression corresponding to
7601      (*(const unsigned char*)arg1 - *(const unsigned char*)arg2).  */
7602 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
7603 {
7604 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7605 tree cst_uchar_ptr_node
7606 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7607
7608 tree ind1
7609 = fold_convert_loc (loc, integer_type_node,
7610 build1 (INDIRECT_REF, cst_uchar_node,
7611 fold_convert_loc (loc,
7612 cst_uchar_ptr_node,
7613 arg1)));
7614 tree ind2
7615 = fold_convert_loc (loc, integer_type_node,
7616 build1 (INDIRECT_REF, cst_uchar_node,
7617 fold_convert_loc (loc,
7618 cst_uchar_ptr_node,
7619 arg2)));
7620 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
7621 }
7622
7623 return NULL_TREE;
7624 }
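
/* As an illustration, memcmp ("abc", "abd", 3) folds to -1 at
   compile time, memcmp (p, p, n) folds to 0 for any P, and
   memcmp (p, q, 1) becomes the byte difference
   *(const unsigned char *) p - *(const unsigned char *) q.  */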
7625
7626 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
7627 Return NULL_TREE if no simplification can be made. */
7628
7629 static tree
7630 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
7631 {
7632 const char *p1, *p2;
7633
7634 if (!validate_arg (arg1, POINTER_TYPE)
7635 || !validate_arg (arg2, POINTER_TYPE))
7636 return NULL_TREE;
7637
7638 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7639 if (operand_equal_p (arg1, arg2, 0))
7640 return integer_zero_node;
7641
7642 p1 = c_getstr (arg1);
7643 p2 = c_getstr (arg2);
7644
7645 if (p1 && p2)
7646 {
7647 const int i = strcmp (p1, p2);
7648 if (i < 0)
7649 return integer_minus_one_node;
7650 else if (i > 0)
7651 return integer_one_node;
7652 else
7653 return integer_zero_node;
7654 }
7655
7656 /* If the second arg is "", return *(const unsigned char*)arg1. */
7657 if (p2 && *p2 == '\0')
7658 {
7659 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7660 tree cst_uchar_ptr_node
7661 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7662
7663 return fold_convert_loc (loc, integer_type_node,
7664 build1 (INDIRECT_REF, cst_uchar_node,
7665 fold_convert_loc (loc,
7666 cst_uchar_ptr_node,
7667 arg1)));
7668 }
7669
7670 /* If the first arg is "", return -*(const unsigned char*)arg2. */
7671 if (p1 && *p1 == '\0')
7672 {
7673 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7674 tree cst_uchar_ptr_node
7675 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7676
7677 tree temp
7678 = fold_convert_loc (loc, integer_type_node,
7679 build1 (INDIRECT_REF, cst_uchar_node,
7680 fold_convert_loc (loc,
7681 cst_uchar_ptr_node,
7682 arg2)));
7683 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
7684 }
7685
7686 return NULL_TREE;
7687 }
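
/* As an illustration, strcmp ("a", "b") folds to -1 at compile
   time, strcmp (s, "") becomes *(const unsigned char *) s, and
   strcmp ("", s) becomes -*(const unsigned char *) s.  */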
7688
7689 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
7690 Return NULL_TREE if no simplification can be made. */
7691
7692 static tree
7693 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
7694 {
7695 const char *p1, *p2;
7696
7697 if (!validate_arg (arg1, POINTER_TYPE)
7698 || !validate_arg (arg2, POINTER_TYPE)
7699 || !validate_arg (len, INTEGER_TYPE))
7700 return NULL_TREE;
7701
7702 /* If the LEN parameter is zero, return zero. */
7703 if (integer_zerop (len))
7704 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
7705 arg1, arg2);
7706
7707 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7708 if (operand_equal_p (arg1, arg2, 0))
7709 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
7710
7711 p1 = c_getstr (arg1);
7712 p2 = c_getstr (arg2);
7713
7714 if (tree_fits_uhwi_p (len) && p1 && p2)
7715 {
7716 const int i = strncmp (p1, p2, tree_to_uhwi (len));
7717 if (i > 0)
7718 return integer_one_node;
7719 else if (i < 0)
7720 return integer_minus_one_node;
7721 else
7722 return integer_zero_node;
7723 }
7724
7725 /* If the second arg is "", and the length is greater than zero,
7726 return *(const unsigned char*)arg1. */
7727 if (p2 && *p2 == '\0'
7728 && TREE_CODE (len) == INTEGER_CST
7729 && tree_int_cst_sgn (len) == 1)
7730 {
7731 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7732 tree cst_uchar_ptr_node
7733 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7734
7735 return fold_convert_loc (loc, integer_type_node,
7736 build1 (INDIRECT_REF, cst_uchar_node,
7737 fold_convert_loc (loc,
7738 cst_uchar_ptr_node,
7739 arg1)));
7740 }
7741
7742 /* If the first arg is "", and the length is greater than zero,
7743 return -*(const unsigned char*)arg2. */
7744 if (p1 && *p1 == '\0'
7745 && TREE_CODE (len) == INTEGER_CST
7746 && tree_int_cst_sgn (len) == 1)
7747 {
7748 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7749 tree cst_uchar_ptr_node
7750 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7751
7752 tree temp = fold_convert_loc (loc, integer_type_node,
7753 build1 (INDIRECT_REF, cst_uchar_node,
7754 fold_convert_loc (loc,
7755 cst_uchar_ptr_node,
7756 arg2)));
7757 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
7758 }
7759
7760   /* If the LEN parameter is one, return an expression corresponding to
7761      (*(const unsigned char*)arg1 - *(const unsigned char*)arg2).  */
7762 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
7763 {
7764 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7765 tree cst_uchar_ptr_node
7766 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7767
7768 tree ind1 = fold_convert_loc (loc, integer_type_node,
7769 build1 (INDIRECT_REF, cst_uchar_node,
7770 fold_convert_loc (loc,
7771 cst_uchar_ptr_node,
7772 arg1)));
7773 tree ind2 = fold_convert_loc (loc, integer_type_node,
7774 build1 (INDIRECT_REF, cst_uchar_node,
7775 fold_convert_loc (loc,
7776 cst_uchar_ptr_node,
7777 arg2)));
7778 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
7779 }
7780
7781 return NULL_TREE;
7782 }
7783
7784 /* Fold a call to builtin isascii with argument ARG. */
7785
7786 static tree
7787 fold_builtin_isascii (location_t loc, tree arg)
7788 {
7789 if (!validate_arg (arg, INTEGER_TYPE))
7790 return NULL_TREE;
7791 else
7792 {
7793 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
7794 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
7795 build_int_cst (integer_type_node,
7796 ~ (unsigned HOST_WIDE_INT) 0x7f));
7797 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
7798 arg, integer_zero_node);
7799 }
7800 }
7801
7802 /* Fold a call to builtin toascii with argument ARG. */
7803
7804 static tree
7805 fold_builtin_toascii (location_t loc, tree arg)
7806 {
7807 if (!validate_arg (arg, INTEGER_TYPE))
7808 return NULL_TREE;
7809
7810 /* Transform toascii(c) -> (c & 0x7f). */
7811 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
7812 build_int_cst (integer_type_node, 0x7f));
7813 }
7814
7815 /* Fold a call to builtin isdigit with argument ARG. */
7816
7817 static tree
7818 fold_builtin_isdigit (location_t loc, tree arg)
7819 {
7820 if (!validate_arg (arg, INTEGER_TYPE))
7821 return NULL_TREE;
7822 else
7823 {
7824 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
7825 /* According to the C standard, isdigit is unaffected by locale.
7826 However, it definitely is affected by the target character set. */
7827 unsigned HOST_WIDE_INT target_digit0
7828 = lang_hooks.to_target_charset ('0');
7829
7830 if (target_digit0 == 0)
7831 return NULL_TREE;
7832
7833 arg = fold_convert_loc (loc, unsigned_type_node, arg);
7834 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
7835 build_int_cst (unsigned_type_node, target_digit0));
7836 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
7837 build_int_cst (unsigned_type_node, 9));
7838 }
7839 }
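
/* As an illustration, on an ASCII target ('0' == 48) the transform
   above turns isdigit (c) into (unsigned) c - 48 <= 9, so
   isdigit ('7') folds to 55u - 48u = 7u <= 9, i.e. 1, with no
   library call.  */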
7840
7841 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
7842
7843 static tree
7844 fold_builtin_fabs (location_t loc, tree arg, tree type)
7845 {
7846 if (!validate_arg (arg, REAL_TYPE))
7847 return NULL_TREE;
7848
7849 arg = fold_convert_loc (loc, type, arg);
7850 return fold_build1_loc (loc, ABS_EXPR, type, arg);
7851 }
7852
7853 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
7854
7855 static tree
7856 fold_builtin_abs (location_t loc, tree arg, tree type)
7857 {
7858 if (!validate_arg (arg, INTEGER_TYPE))
7859 return NULL_TREE;
7860
7861 arg = fold_convert_loc (loc, type, arg);
7862 return fold_build1_loc (loc, ABS_EXPR, type, arg);
7863 }
7864
7865 /* Fold a fma operation with arguments ARG[012]. */
7866
7867 tree
7868 fold_fma (location_t loc ATTRIBUTE_UNUSED,
7869 tree type, tree arg0, tree arg1, tree arg2)
7870 {
7871 if (TREE_CODE (arg0) == REAL_CST
7872 && TREE_CODE (arg1) == REAL_CST
7873 && TREE_CODE (arg2) == REAL_CST)
7874 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
7875
7876 return NULL_TREE;
7877 }
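
/* As an illustration, fold_fma turns fma (2.0, 3.0, 1.0), with all
   three operands constant, into 7.0 via mpfr_fma; the single-rounded
   2.0 * 3.0 + 1.0 is exact here.  */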
7878
7879 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
7880
7881 static tree
7882 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
7883 {
7884 /* ??? Only expand to FMA_EXPR if it's directly supported. */
7885 if (validate_arg (arg0, REAL_TYPE)
7886 && validate_arg (arg1, REAL_TYPE)
7887 && validate_arg (arg2, REAL_TYPE)
7888 && optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
7889 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
7890
7891 return NULL_TREE;
7892 }
7893
7894 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
7895
7896 static tree
7897 fold_builtin_carg (location_t loc, tree arg, tree type)
7898 {
7899 if (validate_arg (arg, COMPLEX_TYPE)
7900 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7901 {
7902 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
7903
7904 if (atan2_fn)
7905 {
7906 tree new_arg = builtin_save_expr (arg);
7907 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
7908 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
7909 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
7910 }
7911 }
7912
7913 return NULL_TREE;
7914 }
7915
7916 /* Fold a call to builtin logb/ilogb. */
7917
7918 static tree
7919 fold_const_builtin_logb (location_t loc, tree arg, tree rettype)
7920 {
7921 if (! validate_arg (arg, REAL_TYPE))
7922 return NULL_TREE;
7923
7924 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
7925 {
7926 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
7927
7928 switch (value->cl)
7929 {
7930 case rvc_nan:
7931 case rvc_inf:
7932 	  /* If arg is Inf or NaN and this is logb, return it.  */
7933 if (TREE_CODE (rettype) == REAL_TYPE)
7934 {
7935 /* For logb(-Inf) we have to return +Inf. */
7936 if (real_isinf (value) && real_isneg (value))
7937 {
7938 REAL_VALUE_TYPE tem;
7939 real_inf (&tem);
7940 return build_real (rettype, tem);
7941 }
7942 return fold_convert_loc (loc, rettype, arg);
7943 }
7944 /* Fall through... */
7945 case rvc_zero:
7946 	  /* Zero may set errno and/or raise an exception for logb; also,
7947 	     for ilogb we don't know FP_ILOGB0.  */
7948 return NULL_TREE;
7949 case rvc_normal:
7950 /* For normal numbers, proceed iff radix == 2. In GCC,
7951 normalized significands are in the range [0.5, 1.0). We
7952 want the exponent as if they were [1.0, 2.0) so get the
7953 exponent and subtract 1. */
7954 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
7955 return fold_convert_loc (loc, rettype,
7956 build_int_cst (integer_type_node,
7957 REAL_EXP (value)-1));
7958 break;
7959 }
7960 }
7961
7962 return NULL_TREE;
7963 }
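
/* As an illustration, logb (8.0) folds to 3.0: GCC normalizes
   significands into [0.5, 1.0), so 8.0 is 0.5 * 2**4 with a
   REAL_EXP of 4, and subtracting 1 gives the C99 logb exponent
   of 3.  */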
7964
7965 /* Fold a call to builtin significand, if radix == 2. */
7966
7967 static tree
7968 fold_const_builtin_significand (location_t loc, tree arg, tree rettype)
7969 {
7970 if (! validate_arg (arg, REAL_TYPE))
7971 return NULL_TREE;
7972
7973 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
7974 {
7975 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
7976
7977 switch (value->cl)
7978 {
7979 case rvc_zero:
7980 case rvc_nan:
7981 case rvc_inf:
7982 /* If arg is +-0, +-Inf or +-NaN, then return it. */
7983 return fold_convert_loc (loc, rettype, arg);
7984 case rvc_normal:
7985 /* For normal numbers, proceed iff radix == 2. */
7986 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
7987 {
7988 REAL_VALUE_TYPE result = *value;
7989 /* In GCC, normalized significands are in the range [0.5,
7990 1.0). We want them to be [1.0, 2.0) so set the
7991 exponent to 1. */
7992 SET_REAL_EXP (&result, 1);
7993 return build_real (rettype, result);
7994 }
7995 break;
7996 }
7997 }
7998
7999 return NULL_TREE;
8000 }
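
/* As an illustration, significand (12.0) folds to 1.5: 12.0 is
   0.75 * 2**4 in GCC's [0.5, 1.0) normalization, and forcing the
   exponent to 1 yields 0.75 * 2**1 = 1.5, the significand scaled
   into [1.0, 2.0).  */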
8001
8002 /* Fold a call to builtin frexp, we can assume the base is 2. */
8003
8004 static tree
8005 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
8006 {
8007 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8008 return NULL_TREE;
8009
8010 STRIP_NOPS (arg0);
8011
8012 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8013 return NULL_TREE;
8014
8015 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8016
8017 /* Proceed if a valid pointer type was passed in. */
8018 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
8019 {
8020 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8021 tree frac, exp;
8022
8023 switch (value->cl)
8024 {
8025 case rvc_zero:
8026 /* For +-0, return (*exp = 0, +-0). */
8027 exp = integer_zero_node;
8028 frac = arg0;
8029 break;
8030 case rvc_nan:
8031 case rvc_inf:
8032 	  /* For +-NaN or +-Inf, *exp is unspecified; return arg0.  */
8033 return omit_one_operand_loc (loc, rettype, arg0, arg1);
8034 case rvc_normal:
8035 {
8036 /* Since the frexp function always expects base 2, and in
8037 GCC normalized significands are already in the range
8038 [0.5, 1.0), we have exactly what frexp wants. */
8039 REAL_VALUE_TYPE frac_rvt = *value;
8040 SET_REAL_EXP (&frac_rvt, 0);
8041 frac = build_real (rettype, frac_rvt);
8042 exp = build_int_cst (integer_type_node, REAL_EXP (value));
8043 }
8044 break;
8045 default:
8046 gcc_unreachable ();
8047 }
8048
8049 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
8050 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
8051 TREE_SIDE_EFFECTS (arg1) = 1;
8052 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
8053 }
8054
8055 return NULL_TREE;
8056 }
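
/* As an illustration, frexp (8.0, &e) folds to the compound
   expression (*e = 4, 0.5): 8.0 == 0.5 * 2**4, and GCC's internal
   normalization already matches frexp's [0.5, 1.0) convention, so
   only the exponent needs extracting.  */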
8057
8058 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
8059 then we can assume the base is two. If it's false, then we have to
8060 check the mode of the TYPE parameter in certain cases. */
8061
8062 static tree
8063 fold_const_builtin_load_exponent (tree arg0, tree arg1,
8064 tree type, bool ldexp)
8065 {
8066 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
8067 {
8068 /* If both arguments are constant, then try to evaluate it. */
8069 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
8070 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
8071 && tree_fits_shwi_p (arg1))
8072 {
8073 /* Bound the maximum adjustment to twice the range of the
8074 mode's valid exponents. Use abs to ensure the range is
8075 positive as a sanity check. */
8076 const long max_exp_adj = 2 *
8077 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
8078 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
8079
8080 /* Get the user-requested adjustment. */
8081 const HOST_WIDE_INT req_exp_adj = tree_to_shwi (arg1);
8082
8083 /* The requested adjustment must be inside this range. This
8084 	     is a preliminary cap to avoid things like overflow; we
8085 may still fail to compute the result for other reasons. */
8086 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
8087 {
8088 REAL_VALUE_TYPE initial_result;
8089
8090 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
8091
8092 /* Ensure we didn't overflow. */
8093 if (! real_isinf (&initial_result))
8094 {
8095 const REAL_VALUE_TYPE trunc_result
8096 = real_value_truncate (TYPE_MODE (type), initial_result);
8097
8098 /* Only proceed if the target mode can hold the
8099 resulting value. */
8100 if (real_equal (&initial_result, &trunc_result))
8101 return build_real (type, trunc_result);
8102 }
8103 }
8104 }
8105 }
8106
8107 return NULL_TREE;
8108 }
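
/* As an illustration, ldexp (1.0, 10) folds to 1024.0, and
   scalbn/scalbln fold the same way when the type's radix is 2.
   Something like ldexp (1.0, 100000) for IEEE double falls outside
   the max_exp_adj cap (and would overflow to infinity anyway), so
   it is left for the library to handle at run time.  */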
8109
8110 /* Fold a call to builtin modf. */
8111
8112 static tree
8113 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
8114 {
8115 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8116 return NULL_TREE;
8117
8118 STRIP_NOPS (arg0);
8119
8120 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8121 return NULL_TREE;
8122
8123 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8124
8125 /* Proceed if a valid pointer type was passed in. */
8126 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
8127 {
8128 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8129 REAL_VALUE_TYPE trunc, frac;
8130
8131 switch (value->cl)
8132 {
8133 case rvc_nan:
8134 case rvc_zero:
8135 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
8136 trunc = frac = *value;
8137 break;
8138 case rvc_inf:
8139 /* For +-Inf, return (*arg1 = arg0, +-0). */
8140 frac = dconst0;
8141 frac.sign = value->sign;
8142 trunc = *value;
8143 break;
8144 case rvc_normal:
8145 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
8146 real_trunc (&trunc, VOIDmode, value);
8147 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
8148 /* If the original number was negative and already
8149 integral, then the fractional part is -0.0. */
8150 if (value->sign && frac.cl == rvc_zero)
8151 frac.sign = value->sign;
8152 break;
8153 }
8154
8155 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
8156 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
8157 build_real (rettype, trunc));
8158 TREE_SIDE_EFFECTS (arg1) = 1;
8159 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
8160 build_real (rettype, frac));
8161 }
8162
8163 return NULL_TREE;
8164 }
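
/* As an illustration, modf (3.25, &i) folds to (*i = 3.0, 0.25),
   and modf (-2.0, &i) folds to (*i = -2.0, -0.0), keeping the sign
   on the zero fractional part for negative integral inputs.  */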
8165
8166 /* Given a location LOC, an interclass builtin function decl FNDECL
8167    and its single argument ARG, return a folded expression computing
8168    the same, or NULL_TREE if we either couldn't or didn't want to fold
8169    (the latter happens if there's an RTL instruction available).  */
8170
8171 static tree
8172 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
8173 {
8174 machine_mode mode;
8175
8176 if (!validate_arg (arg, REAL_TYPE))
8177 return NULL_TREE;
8178
8179 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
8180 return NULL_TREE;
8181
8182 mode = TYPE_MODE (TREE_TYPE (arg));
8183
8184 /* If there is no optab, try generic code. */
8185 switch (DECL_FUNCTION_CODE (fndecl))
8186 {
8187 tree result;
8188
8189 CASE_FLT_FN (BUILT_IN_ISINF):
8190 {
8191 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
8192 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8193 tree const type = TREE_TYPE (arg);
8194 REAL_VALUE_TYPE r;
8195 char buf[128];
8196
8197 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8198 real_from_string (&r, buf);
8199 result = build_call_expr (isgr_fn, 2,
8200 fold_build1_loc (loc, ABS_EXPR, type, arg),
8201 build_real (type, r));
8202 return result;
8203 }
8204 CASE_FLT_FN (BUILT_IN_FINITE):
8205 case BUILT_IN_ISFINITE:
8206 {
8207 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
8208 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8209 tree const type = TREE_TYPE (arg);
8210 REAL_VALUE_TYPE r;
8211 char buf[128];
8212
8213 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8214 real_from_string (&r, buf);
8215 result = build_call_expr (isle_fn, 2,
8216 fold_build1_loc (loc, ABS_EXPR, type, arg),
8217 build_real (type, r));
8218 /*result = fold_build2_loc (loc, UNGT_EXPR,
8219 TREE_TYPE (TREE_TYPE (fndecl)),
8220 fold_build1_loc (loc, ABS_EXPR, type, arg),
8221 build_real (type, r));
8222 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
8223 TREE_TYPE (TREE_TYPE (fndecl)),
8224 result);*/
8225 return result;
8226 }
8227 case BUILT_IN_ISNORMAL:
8228 {
8229 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
8230 islessequal(fabs(x),DBL_MAX). */
8231 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8232 tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
8233 tree const type = TREE_TYPE (arg);
8234 REAL_VALUE_TYPE rmax, rmin;
8235 char buf[128];
8236
8237 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8238 real_from_string (&rmax, buf);
8239 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
8240 real_from_string (&rmin, buf);
8241 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
8242 result = build_call_expr (isle_fn, 2, arg,
8243 build_real (type, rmax));
8244 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
8245 build_call_expr (isge_fn, 2, arg,
8246 build_real (type, rmin)));
8247 return result;
8248 }
8249 default:
8250 break;
8251 }
8252
8253 return NULL_TREE;
8254 }
8255
8256 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
8257 ARG is the argument for the call. */
8258
8259 static tree
8260 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
8261 {
8262 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8263 REAL_VALUE_TYPE r;
8264
8265 if (!validate_arg (arg, REAL_TYPE))
8266 return NULL_TREE;
8267
8268 switch (builtin_index)
8269 {
8270 case BUILT_IN_ISINF:
8271 if (!HONOR_INFINITIES (arg))
8272 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8273
8274 if (TREE_CODE (arg) == REAL_CST)
8275 {
8276 r = TREE_REAL_CST (arg);
8277 if (real_isinf (&r))
8278 return real_compare (GT_EXPR, &r, &dconst0)
8279 ? integer_one_node : integer_minus_one_node;
8280 else
8281 return integer_zero_node;
8282 }
8283
8284 return NULL_TREE;
8285
8286 case BUILT_IN_ISINF_SIGN:
8287 {
8288 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
8289 /* In a boolean context, GCC will fold the inner COND_EXPR to
8290 1. So e.g. "if (isinf_sign(x))" would be folded to just
8291 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
8292 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
8293 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
8294 tree tmp = NULL_TREE;
8295
8296 arg = builtin_save_expr (arg);
8297
8298 if (signbit_fn && isinf_fn)
8299 {
8300 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
8301 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
8302
8303 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8304 signbit_call, integer_zero_node);
8305 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8306 isinf_call, integer_zero_node);
8307
8308 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
8309 integer_minus_one_node, integer_one_node);
8310 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8311 isinf_call, tmp,
8312 integer_zero_node);
8313 }
8314
8315 return tmp;
8316 }
8317
8318 case BUILT_IN_ISFINITE:
8319 if (!HONOR_NANS (arg)
8320 && !HONOR_INFINITIES (arg))
8321 return omit_one_operand_loc (loc, type, integer_one_node, arg);
8322
8323 if (TREE_CODE (arg) == REAL_CST)
8324 {
8325 r = TREE_REAL_CST (arg);
8326 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
8327 }
8328
8329 return NULL_TREE;
8330
8331 case BUILT_IN_ISNAN:
8332 if (!HONOR_NANS (arg))
8333 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8334
8335 if (TREE_CODE (arg) == REAL_CST)
8336 {
8337 r = TREE_REAL_CST (arg);
8338 return real_isnan (&r) ? integer_one_node : integer_zero_node;
8339 }
8340
8341 arg = builtin_save_expr (arg);
8342 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
8343
8344 default:
8345 gcc_unreachable ();
8346 }
8347 }
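
/* As an illustration, for a non-constant ARG, __builtin_isnan (x)
   folds to the unordered self-comparison UNORDERED_EXPR <x, x>,
   which is true exactly when x is a NaN; when NaNs are not honored
   it folds directly to 0.  */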
8348
8349 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
8350 This builtin will generate code to return the appropriate floating
8351 point classification depending on the value of the floating point
8352 number passed in. The possible return values must be supplied as
8353 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
8354    FP_NORMAL, FP_SUBNORMAL and FP_ZERO.  The ellipsis is for exactly
8355 one floating point argument which is "type generic". */
8356
8357 static tree
8358 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
8359 {
8360 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
8361 arg, type, res, tmp;
8362 machine_mode mode;
8363 REAL_VALUE_TYPE r;
8364 char buf[128];
8365
8366 /* Verify the required arguments in the original call. */
8367 if (nargs != 6
8368 || !validate_arg (args[0], INTEGER_TYPE)
8369 || !validate_arg (args[1], INTEGER_TYPE)
8370 || !validate_arg (args[2], INTEGER_TYPE)
8371 || !validate_arg (args[3], INTEGER_TYPE)
8372 || !validate_arg (args[4], INTEGER_TYPE)
8373 || !validate_arg (args[5], REAL_TYPE))
8374 return NULL_TREE;
8375
8376 fp_nan = args[0];
8377 fp_infinite = args[1];
8378 fp_normal = args[2];
8379 fp_subnormal = args[3];
8380 fp_zero = args[4];
8381 arg = args[5];
8382 type = TREE_TYPE (arg);
8383 mode = TYPE_MODE (type);
8384 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
8385
8386 /* fpclassify(x) ->
8387 isnan(x) ? FP_NAN :
8388 (fabs(x) == Inf ? FP_INFINITE :
8389 (fabs(x) >= DBL_MIN ? FP_NORMAL :
8390 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
8391
8392 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8393 build_real (type, dconst0));
8394 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8395 tmp, fp_zero, fp_subnormal);
8396
8397 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
8398 real_from_string (&r, buf);
8399 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
8400 arg, build_real (type, r));
8401 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
8402
8403 if (HONOR_INFINITIES (mode))
8404 {
8405 real_inf (&r);
8406 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8407 build_real (type, r));
8408 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
8409 fp_infinite, res);
8410 }
8411
8412 if (HONOR_NANS (mode))
8413 {
8414 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
8415 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
8416 }
8417
8418 return res;
8419 }
8420
8421 /* Fold a call to an unordered comparison function such as
8422 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
8423 being called and ARG0 and ARG1 are the arguments for the call.
8424 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
8425 the opposite of the desired result. UNORDERED_CODE is used
8426 for modes that can hold NaNs and ORDERED_CODE is used for
8427 the rest. */
8428
8429 static tree
8430 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
8431 enum tree_code unordered_code,
8432 enum tree_code ordered_code)
8433 {
8434 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8435 enum tree_code code;
8436 tree type0, type1;
8437 enum tree_code code0, code1;
8438 tree cmp_type = NULL_TREE;
8439
8440 type0 = TREE_TYPE (arg0);
8441 type1 = TREE_TYPE (arg1);
8442
8443 code0 = TREE_CODE (type0);
8444 code1 = TREE_CODE (type1);
8445
8446 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
8447 /* Choose the wider of two real types. */
8448 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
8449 ? type0 : type1;
8450 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
8451 cmp_type = type0;
8452 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
8453 cmp_type = type1;
8454
8455 arg0 = fold_convert_loc (loc, cmp_type, arg0);
8456 arg1 = fold_convert_loc (loc, cmp_type, arg1);
8457
8458 if (unordered_code == UNORDERED_EXPR)
8459 {
8460 if (!HONOR_NANS (arg0))
8461 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
8462 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
8463 }
8464
8465 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
8466 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
8467 fold_build2_loc (loc, code, type, arg0, arg1));
8468 }
8469
8470 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
8471    arithmetic if it can never overflow, or into internal functions that
8472    return both the result of the arithmetic and an overflow flag in
8473    a complex integer result, or into some other check for overflow.  */
8474
8475 static tree
8476 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
8477 tree arg0, tree arg1, tree arg2)
8478 {
8479 enum internal_fn ifn = IFN_LAST;
8480 tree type = TREE_TYPE (TREE_TYPE (arg2));
8481 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
8482 switch (fcode)
8483 {
8484 case BUILT_IN_ADD_OVERFLOW:
8485 case BUILT_IN_SADD_OVERFLOW:
8486 case BUILT_IN_SADDL_OVERFLOW:
8487 case BUILT_IN_SADDLL_OVERFLOW:
8488 case BUILT_IN_UADD_OVERFLOW:
8489 case BUILT_IN_UADDL_OVERFLOW:
8490 case BUILT_IN_UADDLL_OVERFLOW:
8491 ifn = IFN_ADD_OVERFLOW;
8492 break;
8493 case BUILT_IN_SUB_OVERFLOW:
8494 case BUILT_IN_SSUB_OVERFLOW:
8495 case BUILT_IN_SSUBL_OVERFLOW:
8496 case BUILT_IN_SSUBLL_OVERFLOW:
8497 case BUILT_IN_USUB_OVERFLOW:
8498 case BUILT_IN_USUBL_OVERFLOW:
8499 case BUILT_IN_USUBLL_OVERFLOW:
8500 ifn = IFN_SUB_OVERFLOW;
8501 break;
8502 case BUILT_IN_MUL_OVERFLOW:
8503 case BUILT_IN_SMUL_OVERFLOW:
8504 case BUILT_IN_SMULL_OVERFLOW:
8505 case BUILT_IN_SMULLL_OVERFLOW:
8506 case BUILT_IN_UMUL_OVERFLOW:
8507 case BUILT_IN_UMULL_OVERFLOW:
8508 case BUILT_IN_UMULLL_OVERFLOW:
8509 ifn = IFN_MUL_OVERFLOW;
8510 break;
8511 default:
8512 gcc_unreachable ();
8513 }
8514 tree ctype = build_complex_type (type);
8515 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
8516 2, arg0, arg1);
8517 tree tgt = save_expr (call);
8518 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
8519 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
8520 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
8521 tree store
8522 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
8523 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
8524 }
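
/* As an illustration, bool b = __builtin_add_overflow (x, y, &r) is
   lowered here roughly to

     ctmp = IFN_ADD_OVERFLOW (x, y);
     r = REALPART_EXPR <ctmp>;
     b = (_Bool) IMAGPART_EXPR <ctmp>;

   with the numeric result and the overflow flag packed into one
   complex integer value.  */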
8525
8526 /* Fold a call to built-in function FNDECL with 0 arguments.
8527 This function returns NULL_TREE if no simplification was possible. */
8528
8529 static tree
8530 fold_builtin_0 (location_t loc, tree fndecl)
8531 {
8532 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8533 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8534 switch (fcode)
8535 {
8536 CASE_FLT_FN (BUILT_IN_INF):
8537 case BUILT_IN_INFD32:
8538 case BUILT_IN_INFD64:
8539 case BUILT_IN_INFD128:
8540 return fold_builtin_inf (loc, type, true);
8541
8542 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
8543 return fold_builtin_inf (loc, type, false);
8544
8545 case BUILT_IN_CLASSIFY_TYPE:
8546 return fold_builtin_classify_type (NULL_TREE);
8547
8548 default:
8549 break;
8550 }
8551 return NULL_TREE;
8552 }
8553
8554 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
8555 This function returns NULL_TREE if no simplification was possible. */
8556
8557 static tree
8558 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
8559 {
8560 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8561 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8562 switch (fcode)
8563 {
8564 case BUILT_IN_CONSTANT_P:
8565 {
8566 tree val = fold_builtin_constant_p (arg0);
8567
8568 /* Gimplification will pull the CALL_EXPR for the builtin out of
8569 an if condition. When not optimizing, we'll not CSE it back.
8570 	   To avoid regressions such as link errors, return false now.  */
8571 if (!val && !optimize)
8572 val = integer_zero_node;
8573
8574 return val;
8575 }
8576
8577 case BUILT_IN_CLASSIFY_TYPE:
8578 return fold_builtin_classify_type (arg0);
8579
8580 case BUILT_IN_STRLEN:
8581 return fold_builtin_strlen (loc, type, arg0);
8582
8583 CASE_FLT_FN (BUILT_IN_FABS):
8584 case BUILT_IN_FABSD32:
8585 case BUILT_IN_FABSD64:
8586 case BUILT_IN_FABSD128:
8587 return fold_builtin_fabs (loc, arg0, type);
8588
8589 case BUILT_IN_ABS:
8590 case BUILT_IN_LABS:
8591 case BUILT_IN_LLABS:
8592 case BUILT_IN_IMAXABS:
8593 return fold_builtin_abs (loc, arg0, type);
8594
8595 CASE_FLT_FN (BUILT_IN_CONJ):
8596 if (validate_arg (arg0, COMPLEX_TYPE)
8597 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8598 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
8599 break;
8600
8601 CASE_FLT_FN (BUILT_IN_CREAL):
8602 if (validate_arg (arg0, COMPLEX_TYPE)
8603 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8604 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
8605 break;
8606
8607 CASE_FLT_FN (BUILT_IN_CIMAG):
8608 if (validate_arg (arg0, COMPLEX_TYPE)
8609 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8610 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
8611 break;
8612
8613 CASE_FLT_FN (BUILT_IN_CCOS):
8614 if (validate_arg (arg0, COMPLEX_TYPE)
8615 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8616 return do_mpc_arg1 (arg0, type, mpc_cos);
8617 break;
8618
8619 CASE_FLT_FN (BUILT_IN_CCOSH):
8620 if (validate_arg (arg0, COMPLEX_TYPE)
8621 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8622 return do_mpc_arg1 (arg0, type, mpc_cosh);
8623 break;
8624
8625 CASE_FLT_FN (BUILT_IN_CPROJ):
8626 if (TREE_CODE (arg0) == COMPLEX_CST
8627 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8628 {
8629 const REAL_VALUE_TYPE *real
8630 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
8631 const REAL_VALUE_TYPE *imag
8632 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
8633
8634 if (real_isinf (real) || real_isinf (imag))
8635 return build_complex_inf (type, imag->sign);
8636 else
8637 return arg0;
8638 }
8639 break;
8640
8641 CASE_FLT_FN (BUILT_IN_CSIN):
8642 if (validate_arg (arg0, COMPLEX_TYPE)
8643 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8644 return do_mpc_arg1 (arg0, type, mpc_sin);
8645 break;
8646
8647 CASE_FLT_FN (BUILT_IN_CSINH):
8648 if (validate_arg (arg0, COMPLEX_TYPE)
8649 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8650 return do_mpc_arg1 (arg0, type, mpc_sinh);
8651 break;
8652
8653 CASE_FLT_FN (BUILT_IN_CTAN):
8654 if (validate_arg (arg0, COMPLEX_TYPE)
8655 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8656 return do_mpc_arg1 (arg0, type, mpc_tan);
8657 break;
8658
8659 CASE_FLT_FN (BUILT_IN_CTANH):
8660 if (validate_arg (arg0, COMPLEX_TYPE)
8661 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8662 return do_mpc_arg1 (arg0, type, mpc_tanh);
8663 break;
8664
8665 CASE_FLT_FN (BUILT_IN_CLOG):
8666 if (validate_arg (arg0, COMPLEX_TYPE)
8667 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8668 return do_mpc_arg1 (arg0, type, mpc_log);
8669 break;
8670
8671 CASE_FLT_FN (BUILT_IN_CSQRT):
8672 if (validate_arg (arg0, COMPLEX_TYPE)
8673 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8674 return do_mpc_arg1 (arg0, type, mpc_sqrt);
8675 break;
8676
8677 CASE_FLT_FN (BUILT_IN_CASIN):
8678 if (validate_arg (arg0, COMPLEX_TYPE)
8679 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8680 return do_mpc_arg1 (arg0, type, mpc_asin);
8681 break;
8682
8683 CASE_FLT_FN (BUILT_IN_CACOS):
8684 if (validate_arg (arg0, COMPLEX_TYPE)
8685 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8686 return do_mpc_arg1 (arg0, type, mpc_acos);
8687 break;
8688
8689 CASE_FLT_FN (BUILT_IN_CATAN):
8690 if (validate_arg (arg0, COMPLEX_TYPE)
8691 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8692 return do_mpc_arg1 (arg0, type, mpc_atan);
8693 break;
8694
8695 CASE_FLT_FN (BUILT_IN_CASINH):
8696 if (validate_arg (arg0, COMPLEX_TYPE)
8697 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8698 return do_mpc_arg1 (arg0, type, mpc_asinh);
8699 break;
8700
8701 CASE_FLT_FN (BUILT_IN_CACOSH):
8702 if (validate_arg (arg0, COMPLEX_TYPE)
8703 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8704 return do_mpc_arg1 (arg0, type, mpc_acosh);
8705 break;
8706
8707 CASE_FLT_FN (BUILT_IN_CATANH):
8708 if (validate_arg (arg0, COMPLEX_TYPE)
8709 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8710 return do_mpc_arg1 (arg0, type, mpc_atanh);
8711 break;
8712
8713 CASE_FLT_FN (BUILT_IN_CABS):
8714 if (TREE_CODE (arg0) == COMPLEX_CST
8715 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8716 return do_mpfr_arg2 (TREE_REALPART (arg0), TREE_IMAGPART (arg0),
8717 type, mpfr_hypot);
8718 break;
8719
8720 CASE_FLT_FN (BUILT_IN_CARG):
8721 return fold_builtin_carg (loc, arg0, type);
8722
8723 CASE_FLT_FN (BUILT_IN_SQRT):
8724 if (validate_arg (arg0, REAL_TYPE))
8725 return do_mpfr_arg1 (arg0, type, mpfr_sqrt, &dconst0, NULL, true);
8726 break;
8727
8728 CASE_FLT_FN (BUILT_IN_CBRT):
8729 if (validate_arg (arg0, REAL_TYPE))
8730 return do_mpfr_arg1 (arg0, type, mpfr_cbrt, NULL, NULL, 0);
8731 break;
8732
8733 CASE_FLT_FN (BUILT_IN_ASIN):
8734 if (validate_arg (arg0, REAL_TYPE))
8735 return do_mpfr_arg1 (arg0, type, mpfr_asin,
8736 &dconstm1, &dconst1, true);
8737 break;
8738
8739 CASE_FLT_FN (BUILT_IN_ACOS):
8740 if (validate_arg (arg0, REAL_TYPE))
8741 return do_mpfr_arg1 (arg0, type, mpfr_acos,
8742 &dconstm1, &dconst1, true);
8743 break;
8744
8745 CASE_FLT_FN (BUILT_IN_ATAN):
8746 if (validate_arg (arg0, REAL_TYPE))
8747 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
8748 break;
8749
8750 CASE_FLT_FN (BUILT_IN_ASINH):
8751 if (validate_arg (arg0, REAL_TYPE))
8752 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
8753 break;
8754
8755 CASE_FLT_FN (BUILT_IN_ACOSH):
8756 if (validate_arg (arg0, REAL_TYPE))
8757 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
8758 &dconst1, NULL, true);
8759 break;
8760
8761 CASE_FLT_FN (BUILT_IN_ATANH):
8762 if (validate_arg (arg0, REAL_TYPE))
8763 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
8764 &dconstm1, &dconst1, false);
8765 break;
8766
8767 CASE_FLT_FN (BUILT_IN_SIN):
8768 if (validate_arg (arg0, REAL_TYPE))
8769 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
8770 break;
8771
8772 CASE_FLT_FN (BUILT_IN_COS):
8773 if (validate_arg (arg0, REAL_TYPE))
8774 return do_mpfr_arg1 (arg0, type, mpfr_cos, NULL, NULL, 0);
8775 break;
8776
8777 CASE_FLT_FN (BUILT_IN_TAN):
8778 if (validate_arg (arg0, REAL_TYPE))
8779 return do_mpfr_arg1 (arg0, type, mpfr_tan, NULL, NULL, 0);
8780 break;
8781
8782 CASE_FLT_FN (BUILT_IN_CEXP):
8783 if (validate_arg (arg0, COMPLEX_TYPE)
8784 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8785 return do_mpc_arg1 (arg0, type, mpc_exp);
8786 break;
8787
8788 CASE_FLT_FN (BUILT_IN_CEXPI):
8789 if (validate_arg (arg0, REAL_TYPE))
8790 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
8791 break;
8792
8793 CASE_FLT_FN (BUILT_IN_SINH):
8794 if (validate_arg (arg0, REAL_TYPE))
8795 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
8796 break;
8797
8798 CASE_FLT_FN (BUILT_IN_COSH):
8799 if (validate_arg (arg0, REAL_TYPE))
8800 return do_mpfr_arg1 (arg0, type, mpfr_cosh, NULL, NULL, 0);
8801 break;
8802
8803 CASE_FLT_FN (BUILT_IN_TANH):
8804 if (validate_arg (arg0, REAL_TYPE))
8805 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
8806 break;
8807
8808 CASE_FLT_FN (BUILT_IN_ERF):
8809 if (validate_arg (arg0, REAL_TYPE))
8810 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
8811 break;
8812
8813 CASE_FLT_FN (BUILT_IN_ERFC):
8814 if (validate_arg (arg0, REAL_TYPE))
8815 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
8816 break;
8817
8818 CASE_FLT_FN (BUILT_IN_TGAMMA):
8819 if (validate_arg (arg0, REAL_TYPE))
8820 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
8821 break;
8822
8823 CASE_FLT_FN (BUILT_IN_EXP):
8824 if (validate_arg (arg0, REAL_TYPE))
8825 return do_mpfr_arg1 (arg0, type, mpfr_exp, NULL, NULL, 0);
8826 break;
8827
8828 CASE_FLT_FN (BUILT_IN_EXP2):
8829 if (validate_arg (arg0, REAL_TYPE))
8830 return do_mpfr_arg1 (arg0, type, mpfr_exp2, NULL, NULL, 0);
8831 break;
8832
8833 CASE_FLT_FN (BUILT_IN_EXP10):
8834 CASE_FLT_FN (BUILT_IN_POW10):
8835 if (validate_arg (arg0, REAL_TYPE))
8836 return do_mpfr_arg1 (arg0, type, mpfr_exp10, NULL, NULL, 0);
8837 break;
8838
8839 CASE_FLT_FN (BUILT_IN_EXPM1):
8840 if (validate_arg (arg0, REAL_TYPE))
8841 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
8842 break;
8843
8844 CASE_FLT_FN (BUILT_IN_LOG):
8845 if (validate_arg (arg0, REAL_TYPE))
8846 return do_mpfr_arg1 (arg0, type, mpfr_log, &dconst0, NULL, false);
8847 break;
8848
8849 CASE_FLT_FN (BUILT_IN_LOG2):
8850 if (validate_arg (arg0, REAL_TYPE))
8851 return do_mpfr_arg1 (arg0, type, mpfr_log2, &dconst0, NULL, false);
8852 break;
8853
8854 CASE_FLT_FN (BUILT_IN_LOG10):
8855 if (validate_arg (arg0, REAL_TYPE))
8856 return do_mpfr_arg1 (arg0, type, mpfr_log10, &dconst0, NULL, false);
8857 break;
8858
8859 CASE_FLT_FN (BUILT_IN_LOG1P):
8860 if (validate_arg (arg0, REAL_TYPE))
8861 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
8862 &dconstm1, NULL, false);
8863 break;
8864
8865 CASE_FLT_FN (BUILT_IN_J0):
8866 if (validate_arg (arg0, REAL_TYPE))
8867 return do_mpfr_arg1 (arg0, type, mpfr_j0,
8868 NULL, NULL, 0);
8869 break;
8870
8871 CASE_FLT_FN (BUILT_IN_J1):
8872 if (validate_arg (arg0, REAL_TYPE))
8873 return do_mpfr_arg1 (arg0, type, mpfr_j1,
8874 NULL, NULL, 0);
8875 break;
8876
8877 CASE_FLT_FN (BUILT_IN_Y0):
8878 if (validate_arg (arg0, REAL_TYPE))
8879 return do_mpfr_arg1 (arg0, type, mpfr_y0,
8880 &dconst0, NULL, false);
8881 break;
8882
8883 CASE_FLT_FN (BUILT_IN_Y1):
8884 if (validate_arg (arg0, REAL_TYPE))
8885 return do_mpfr_arg1 (arg0, type, mpfr_y1,
8886 &dconst0, NULL, false);
8887 break;
8888
8889 CASE_FLT_FN (BUILT_IN_NAN):
8890 case BUILT_IN_NAND32:
8891 case BUILT_IN_NAND64:
8892 case BUILT_IN_NAND128:
8893 return fold_builtin_nan (arg0, type, true);
8894
8895 CASE_FLT_FN (BUILT_IN_NANS):
8896 return fold_builtin_nan (arg0, type, false);
8897
8898 CASE_FLT_FN (BUILT_IN_FLOOR):
8899 if (TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0))
8900 {
8901 REAL_VALUE_TYPE x = TREE_REAL_CST (arg0);
8902 if (!REAL_VALUE_ISNAN (x) || !flag_errno_math)
8903 {
8904 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8905 REAL_VALUE_TYPE r;
8906 real_floor (&r, TYPE_MODE (type), &x);
8907 return build_real (type, r);
8908 }
8909 }
8910 break;
8911
8912 CASE_FLT_FN (BUILT_IN_CEIL):
8913 if (TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0))
8914 {
8915 REAL_VALUE_TYPE x = TREE_REAL_CST (arg0);
8916 if (!REAL_VALUE_ISNAN (x) || !flag_errno_math)
8917 {
8918 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8919 REAL_VALUE_TYPE r;
8920 real_ceil (&r, TYPE_MODE (type), &x);
8921 return build_real (type, r);
8922 }
8923 }
8924 break;
8925
8926 CASE_FLT_FN (BUILT_IN_TRUNC):
8927 if (TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0))
8928 {
8929 REAL_VALUE_TYPE x = TREE_REAL_CST (arg0);
8930 REAL_VALUE_TYPE r;
8931 real_trunc (&r, TYPE_MODE (type), &x);
8932 return build_real (type, r);
8933 }
8934 break;
8935
8936 CASE_FLT_FN (BUILT_IN_ROUND):
8937 if (TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0))
8938 {
8939 REAL_VALUE_TYPE x = TREE_REAL_CST (arg0);
8940 if (!REAL_VALUE_ISNAN (x) || !flag_errno_math)
8941 {
8942 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8943 REAL_VALUE_TYPE r;
8944 real_round (&r, TYPE_MODE (type), &x);
8945 return build_real (type, r);
8946 }
8947 }
8948 break;
8949
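    /* Illustrative examples (not part of the original source) of the
       constant folds above, assuming non-NaN input (round and floor/ceil
       skip NaNs when -fmath-errno is in effect; trunc folds regardless):

	 __builtin_floor (2.7)  -> 2.0
	 __builtin_ceil (2.1)   -> 3.0
	 __builtin_trunc (-2.7) -> -2.0
	 __builtin_round (2.5)  -> 3.0  (halfway cases away from zero)  */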
8950 CASE_FLT_FN (BUILT_IN_ICEIL):
8951 CASE_FLT_FN (BUILT_IN_LCEIL):
8952 CASE_FLT_FN (BUILT_IN_LLCEIL):
8953 return do_real_to_int_conversion (type, arg0, real_ceil);
8954
8955 CASE_FLT_FN (BUILT_IN_LFLOOR):
8956 CASE_FLT_FN (BUILT_IN_IFLOOR):
8957 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8958 return do_real_to_int_conversion (type, arg0, real_floor);
8959
8960 CASE_FLT_FN (BUILT_IN_IROUND):
8961 CASE_FLT_FN (BUILT_IN_LROUND):
8962 CASE_FLT_FN (BUILT_IN_LLROUND):
8963 return do_real_to_int_conversion (type, arg0, real_round);
8964
8965 CASE_FLT_FN (BUILT_IN_IRINT):
8966 CASE_FLT_FN (BUILT_IN_LRINT):
8967 CASE_FLT_FN (BUILT_IN_LLRINT):
8968 /* Not yet folded to a constant. */
8969 return NULL_TREE;
8970
8971 case BUILT_IN_BSWAP16:
8972 case BUILT_IN_BSWAP32:
8973 case BUILT_IN_BSWAP64:
8974 return fold_builtin_bswap (fndecl, arg0);
8975
8976 CASE_INT_FN (BUILT_IN_FFS):
8977 CASE_INT_FN (BUILT_IN_CLZ):
8978 CASE_INT_FN (BUILT_IN_CTZ):
8979 CASE_INT_FN (BUILT_IN_CLRSB):
8980 CASE_INT_FN (BUILT_IN_POPCOUNT):
8981 CASE_INT_FN (BUILT_IN_PARITY):
8982 return fold_builtin_bitop (fndecl, arg0);
8983
8984 CASE_FLT_FN (BUILT_IN_SIGNBIT):
8985 if (TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0))
8986 return (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0))
8987 ? build_one_cst (type)
8988 : build_zero_cst (type));
8989 break;
8990
8991 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
8992 return fold_const_builtin_significand (loc, arg0, type);
8993
8994 CASE_FLT_FN (BUILT_IN_ILOGB):
8995 CASE_FLT_FN (BUILT_IN_LOGB):
8996 return fold_const_builtin_logb (loc, arg0, type);
8997
8998 case BUILT_IN_ISASCII:
8999 return fold_builtin_isascii (loc, arg0);
9000
9001 case BUILT_IN_TOASCII:
9002 return fold_builtin_toascii (loc, arg0);
9003
9004 case BUILT_IN_ISDIGIT:
9005 return fold_builtin_isdigit (loc, arg0);
9006
9007 CASE_FLT_FN (BUILT_IN_FINITE):
9008 case BUILT_IN_FINITED32:
9009 case BUILT_IN_FINITED64:
9010 case BUILT_IN_FINITED128:
9011 case BUILT_IN_ISFINITE:
9012 {
9013 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
9014 if (ret)
9015 return ret;
9016 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9017 }
9018
9019 CASE_FLT_FN (BUILT_IN_ISINF):
9020 case BUILT_IN_ISINFD32:
9021 case BUILT_IN_ISINFD64:
9022 case BUILT_IN_ISINFD128:
9023 {
9024 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
9025 if (ret)
9026 return ret;
9027 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9028 }
9029
9030 case BUILT_IN_ISNORMAL:
9031 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9032
9033 case BUILT_IN_ISINF_SIGN:
9034 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
9035
9036 CASE_FLT_FN (BUILT_IN_ISNAN):
9037 case BUILT_IN_ISNAND32:
9038 case BUILT_IN_ISNAND64:
9039 case BUILT_IN_ISNAND128:
9040 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
9041
9042 case BUILT_IN_FREE:
9043 if (integer_zerop (arg0))
9044 return build_empty_stmt (loc);
9045 break;
9046
9047 default:
9048 break;
9049 }
9050
9051   return NULL_TREE;
9053 }
9054
9055 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
9056 This function returns NULL_TREE if no simplification was possible. */
9057
9058 static tree
9059 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
9060 {
9061 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9062 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9063
9064 switch (fcode)
9065 {
9066 CASE_FLT_FN (BUILT_IN_JN):
9067 if (validate_arg (arg0, INTEGER_TYPE)
9068 && validate_arg (arg1, REAL_TYPE))
9069 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
9070 break;
9071
9072 CASE_FLT_FN (BUILT_IN_YN):
9073 if (validate_arg (arg0, INTEGER_TYPE)
9074 && validate_arg (arg1, REAL_TYPE))
9075 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
9076 &dconst0, false);
9077 break;
9078
9079 CASE_FLT_FN (BUILT_IN_DREM):
9080 CASE_FLT_FN (BUILT_IN_REMAINDER):
9081 if (validate_arg (arg0, REAL_TYPE)
9082 && validate_arg (arg1, REAL_TYPE))
9083 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
9084 break;
9085
9086 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
9087 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
9088 if (validate_arg (arg0, REAL_TYPE)
9089 && validate_arg (arg1, POINTER_TYPE))
9090 return do_mpfr_lgamma_r (arg0, arg1, type);
9091 break;
9092
9093 CASE_FLT_FN (BUILT_IN_ATAN2):
9094 if (validate_arg (arg0, REAL_TYPE)
9095 && validate_arg (arg1, REAL_TYPE))
9096 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
9097 break;
9098
9099 CASE_FLT_FN (BUILT_IN_FDIM):
9100 if (validate_arg (arg0, REAL_TYPE)
9101 && validate_arg (arg1, REAL_TYPE))
9102 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
9103 break;
9104
9105 CASE_FLT_FN (BUILT_IN_HYPOT):
9106 if (validate_arg (arg0, REAL_TYPE)
9107 && validate_arg (arg1, REAL_TYPE))
9108 return do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot);
9109 break;
9110
9111 CASE_FLT_FN (BUILT_IN_CPOW):
9112 if (validate_arg (arg0, COMPLEX_TYPE)
9113 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
9114 && validate_arg (arg1, COMPLEX_TYPE)
9115 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
9116 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
9117 break;
9118
9119 CASE_FLT_FN (BUILT_IN_LDEXP):
9120 return fold_const_builtin_load_exponent (arg0, arg1, type,
9121 /*ldexp=*/true);
9122 CASE_FLT_FN (BUILT_IN_SCALBN):
9123 CASE_FLT_FN (BUILT_IN_SCALBLN):
9124 return fold_const_builtin_load_exponent (arg0, arg1, type,
9125 /*ldexp=*/false);
9126
9127 CASE_FLT_FN (BUILT_IN_FREXP):
9128 return fold_builtin_frexp (loc, arg0, arg1, type);
9129
9130 CASE_FLT_FN (BUILT_IN_MODF):
9131 return fold_builtin_modf (loc, arg0, arg1, type);
9132
9133 case BUILT_IN_STRSTR:
9134 return fold_builtin_strstr (loc, arg0, arg1, type);
9135
9136 case BUILT_IN_STRSPN:
9137 return fold_builtin_strspn (loc, arg0, arg1);
9138
9139 case BUILT_IN_STRCSPN:
9140 return fold_builtin_strcspn (loc, arg0, arg1);
9141
9142 case BUILT_IN_STRCHR:
9143 case BUILT_IN_INDEX:
9144 return fold_builtin_strchr (loc, arg0, arg1, type);
9145
9146 case BUILT_IN_STRRCHR:
9147 case BUILT_IN_RINDEX:
9148 return fold_builtin_strrchr (loc, arg0, arg1, type);
9149
9150 case BUILT_IN_STRCMP:
9151 return fold_builtin_strcmp (loc, arg0, arg1);
9152
9153 case BUILT_IN_STRPBRK:
9154 return fold_builtin_strpbrk (loc, arg0, arg1, type);
9155
9156 case BUILT_IN_EXPECT:
9157 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
9158
9159 CASE_FLT_FN (BUILT_IN_POW):
9160 return fold_const_builtin_pow (arg0, arg1, type);
9161
9162 CASE_FLT_FN (BUILT_IN_POWI):
9163 if (TREE_CODE (arg0) == REAL_CST
9164 && !TREE_OVERFLOW (arg0)
9165 && tree_fits_shwi_p (arg1))
9166 {
9167 HOST_WIDE_INT c = tree_to_shwi (arg1);
9168 REAL_VALUE_TYPE x;
9169 real_powi (&x, TYPE_MODE (type), TREE_REAL_CST_PTR (arg0), c);
9170 return build_real (type, x);
9171 }
9172 break;
9173
9174 CASE_FLT_FN (BUILT_IN_COPYSIGN):
9175 if (TREE_CODE (arg0) == REAL_CST
9176 && TREE_CODE (arg1) == REAL_CST
9177 && !TREE_OVERFLOW (arg0)
9178 && !TREE_OVERFLOW (arg1))
9179 {
9180 REAL_VALUE_TYPE c1 = TREE_REAL_CST (arg0);
9181 real_copysign (&c1, TREE_REAL_CST_PTR (arg1));
9182 return build_real (type, c1);
9183 }
9184 break;
9185
9186 CASE_FLT_FN (BUILT_IN_FMIN):
9187 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9188 return do_mpfr_arg2 (arg0, arg1, type, mpfr_min);
9189 break;
9190
9191 CASE_FLT_FN (BUILT_IN_FMAX):
9192 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9193 return do_mpfr_arg2 (arg0, arg1, type, mpfr_max);
9194 break;
9195
9196 case BUILT_IN_ISGREATER:
9197 return fold_builtin_unordered_cmp (loc, fndecl,
9198 arg0, arg1, UNLE_EXPR, LE_EXPR);
9199 case BUILT_IN_ISGREATEREQUAL:
9200 return fold_builtin_unordered_cmp (loc, fndecl,
9201 arg0, arg1, UNLT_EXPR, LT_EXPR);
9202 case BUILT_IN_ISLESS:
9203 return fold_builtin_unordered_cmp (loc, fndecl,
9204 arg0, arg1, UNGE_EXPR, GE_EXPR);
9205 case BUILT_IN_ISLESSEQUAL:
9206 return fold_builtin_unordered_cmp (loc, fndecl,
9207 arg0, arg1, UNGT_EXPR, GT_EXPR);
9208 case BUILT_IN_ISLESSGREATER:
9209 return fold_builtin_unordered_cmp (loc, fndecl,
9210 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
9211 case BUILT_IN_ISUNORDERED:
9212 return fold_builtin_unordered_cmp (loc, fndecl,
9213 arg0, arg1, UNORDERED_EXPR,
9214 NOP_EXPR);
9215
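    /* Illustrative note (not part of the original source): each builtin
       above pairs an UN* comparison code with its ordered twin, so that
       fold_builtin_unordered_cmp can presumably build, roughly,

	 isgreater (x, y)   ->  !(x UNLE y)   (x > y, but UNLE raises no
					       invalid exception on quiet
					       NaN operands)
	 isunordered (x, y) ->  x UNORDERED y

       falling back to the plain ordered code when NaNs need not be
       honored for the operand type.  */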
9216 /* We do the folding for va_start in the expander. */
9217 case BUILT_IN_VA_START:
9218 break;
9219
9220 case BUILT_IN_OBJECT_SIZE:
9221 return fold_builtin_object_size (arg0, arg1);
9222
9223 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
9224 return fold_builtin_atomic_always_lock_free (arg0, arg1);
9225
9226 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
9227 return fold_builtin_atomic_is_lock_free (arg0, arg1);
9228
9229 default:
9230 break;
9231 }
9232 return NULL_TREE;
9233 }
9234
9235 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
9236 and ARG2.
9237 This function returns NULL_TREE if no simplification was possible. */
9238
9239 static tree
9240 fold_builtin_3 (location_t loc, tree fndecl,
9241 tree arg0, tree arg1, tree arg2)
9242 {
9243 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9244 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9245 switch (fcode)
9246 {
9247
9248 CASE_FLT_FN (BUILT_IN_SINCOS):
9249 return fold_builtin_sincos (loc, arg0, arg1, arg2);
9250
9251 CASE_FLT_FN (BUILT_IN_FMA):
9252 if (tree tem = fold_fma (loc, type, arg0, arg1, arg2))
9253 return tem;
9254 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
9255
9256 CASE_FLT_FN (BUILT_IN_REMQUO):
9257 if (validate_arg (arg0, REAL_TYPE)
9258 && validate_arg (arg1, REAL_TYPE)
9259 && validate_arg (arg2, POINTER_TYPE))
9260 return do_mpfr_remquo (arg0, arg1, arg2);
9261 break;
9262
9263 case BUILT_IN_STRNCMP:
9264 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
9265
9266 case BUILT_IN_MEMCHR:
9267 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
9268
9269 case BUILT_IN_BCMP:
9270 case BUILT_IN_MEMCMP:
9271       return fold_builtin_memcmp (loc, arg0, arg1, arg2);
9272
9273 case BUILT_IN_EXPECT:
9274 return fold_builtin_expect (loc, arg0, arg1, arg2);
9275
9276 case BUILT_IN_ADD_OVERFLOW:
9277 case BUILT_IN_SUB_OVERFLOW:
9278 case BUILT_IN_MUL_OVERFLOW:
9279 case BUILT_IN_SADD_OVERFLOW:
9280 case BUILT_IN_SADDL_OVERFLOW:
9281 case BUILT_IN_SADDLL_OVERFLOW:
9282 case BUILT_IN_SSUB_OVERFLOW:
9283 case BUILT_IN_SSUBL_OVERFLOW:
9284 case BUILT_IN_SSUBLL_OVERFLOW:
9285 case BUILT_IN_SMUL_OVERFLOW:
9286 case BUILT_IN_SMULL_OVERFLOW:
9287 case BUILT_IN_SMULLL_OVERFLOW:
9288 case BUILT_IN_UADD_OVERFLOW:
9289 case BUILT_IN_UADDL_OVERFLOW:
9290 case BUILT_IN_UADDLL_OVERFLOW:
9291 case BUILT_IN_USUB_OVERFLOW:
9292 case BUILT_IN_USUBL_OVERFLOW:
9293 case BUILT_IN_USUBLL_OVERFLOW:
9294 case BUILT_IN_UMUL_OVERFLOW:
9295 case BUILT_IN_UMULL_OVERFLOW:
9296 case BUILT_IN_UMULLL_OVERFLOW:
9297 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
9298
9299 default:
9300 break;
9301 }
9302 return NULL_TREE;
9303 }
9304
9305 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
9306 arguments. IGNORE is true if the result of the
9307 function call is ignored. This function returns NULL_TREE if no
9308 simplification was possible. */
9309
9310 tree
9311 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
9312 {
9313 tree ret = NULL_TREE;
9314
9315 switch (nargs)
9316 {
9317 case 0:
9318 ret = fold_builtin_0 (loc, fndecl);
9319 break;
9320 case 1:
9321 ret = fold_builtin_1 (loc, fndecl, args[0]);
9322 break;
9323 case 2:
9324 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
9325 break;
9326 case 3:
9327 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
9328 break;
9329 default:
9330 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
9331 break;
9332 }
9333 if (ret)
9334 {
9335 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9336 SET_EXPR_LOCATION (ret, loc);
9337 TREE_NO_WARNING (ret) = 1;
9338 return ret;
9339 }
9340 return NULL_TREE;
9341 }
9342
9343 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
9344 list ARGS along with N new arguments in NEWARGS. SKIP is the number
9345 of arguments in ARGS to be omitted. OLDNARGS is the number of
9346 elements in ARGS. */
9347
9348 static tree
9349 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
9350 int skip, tree fndecl, int n, va_list newargs)
9351 {
9352 int nargs = oldnargs - skip + n;
9353 tree *buffer;
9354
9355 if (n > 0)
9356 {
9357 int i, j;
9358
9359 buffer = XALLOCAVEC (tree, nargs);
9360 for (i = 0; i < n; i++)
9361 buffer[i] = va_arg (newargs, tree);
9362 for (j = skip; j < oldnargs; j++, i++)
9363 buffer[i] = args[j];
9364 }
9365 else
9366 buffer = args + skip;
9367
9368 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
9369 }
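/* Illustrative sketch (not part of the original source): with
   OLDNARGS == 3, ARGS == {a, b, c}, SKIP == 1, N == 2 and the new
   arguments being {x, y}, the buffer assembled above is {x, y, b, c};
   the new arguments come first and the skipped prefix of the old
   argument list is dropped.  */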
9370
9371 /* Return true if FNDECL shouldn't be folded right now.
9372 If a built-in function has an inline attribute always_inline
9373 wrapper, defer folding it after always_inline functions have
9374 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
9375 might not be performed. */
9376
9377 bool
9378 avoid_folding_inline_builtin (tree fndecl)
9379 {
9380 return (DECL_DECLARED_INLINE_P (fndecl)
9381 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
9382 && cfun
9383 && !cfun->always_inline_functions_inlined
9384 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
9385 }
9386
9387 /* A wrapper function for builtin folding that prevents warnings for
9388 "statement without effect" and the like, caused by removing the
9389 call node earlier than the warning is generated. */
9390
9391 tree
9392 fold_call_expr (location_t loc, tree exp, bool ignore)
9393 {
9394 tree ret = NULL_TREE;
9395 tree fndecl = get_callee_fndecl (exp);
9396 if (fndecl
9397 && TREE_CODE (fndecl) == FUNCTION_DECL
9398 && DECL_BUILT_IN (fndecl)
9399 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
9400 yet. Defer folding until we see all the arguments
9401 (after inlining). */
9402 && !CALL_EXPR_VA_ARG_PACK (exp))
9403 {
9404 int nargs = call_expr_nargs (exp);
9405
9406 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
9407 instead last argument is __builtin_va_arg_pack (). Defer folding
9408 even in that case, until arguments are finalized. */
9409 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
9410 {
9411 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
9412 if (fndecl2
9413 && TREE_CODE (fndecl2) == FUNCTION_DECL
9414 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
9415 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
9416 return NULL_TREE;
9417 }
9418
9419 if (avoid_folding_inline_builtin (fndecl))
9420 return NULL_TREE;
9421
9422 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9423 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
9424 CALL_EXPR_ARGP (exp), ignore);
9425 else
9426 {
9427 tree *args = CALL_EXPR_ARGP (exp);
9428 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
9429 if (ret)
9430 return ret;
9431 }
9432 }
9433 return NULL_TREE;
9434 }
9435
9436 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
9437 N arguments are passed in the array ARGARRAY. Return a folded
9438 expression or NULL_TREE if no simplification was possible. */
9439
9440 tree
9441 fold_builtin_call_array (location_t loc, tree,
9442 tree fn,
9443 int n,
9444 tree *argarray)
9445 {
9446 if (TREE_CODE (fn) != ADDR_EXPR)
9447 return NULL_TREE;
9448
9449 tree fndecl = TREE_OPERAND (fn, 0);
9450 if (TREE_CODE (fndecl) == FUNCTION_DECL
9451 && DECL_BUILT_IN (fndecl))
9452 {
9453 /* If last argument is __builtin_va_arg_pack (), arguments to this
9454 function are not finalized yet. Defer folding until they are. */
9455 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
9456 {
9457 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
9458 if (fndecl2
9459 && TREE_CODE (fndecl2) == FUNCTION_DECL
9460 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
9461 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
9462 return NULL_TREE;
9463 }
9464 if (avoid_folding_inline_builtin (fndecl))
9465 return NULL_TREE;
9466 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9467 return targetm.fold_builtin (fndecl, n, argarray, false);
9468 else
9469 return fold_builtin_n (loc, fndecl, argarray, n, false);
9470 }
9471
9472 return NULL_TREE;
9473 }
9474
9475 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
9476 along with N new arguments specified as the "..." parameters. SKIP
9477 is the number of arguments in EXP to be omitted. This function is used
9478 to do varargs-to-varargs transformations. */
9479
9480 static tree
9481 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
9482 {
9483 va_list ap;
9484 tree t;
9485
9486 va_start (ap, n);
9487 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
9488 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
9489 va_end (ap);
9490
9491 return t;
9492 }
9493
9494 /* Validate a single argument ARG against a tree code CODE representing
9495 a type. */
9496
9497 static bool
9498 validate_arg (const_tree arg, enum tree_code code)
9499 {
9500 if (!arg)
9501 return false;
9502 else if (code == POINTER_TYPE)
9503 return POINTER_TYPE_P (TREE_TYPE (arg));
9504 else if (code == INTEGER_TYPE)
9505 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
9506 return code == TREE_CODE (TREE_TYPE (arg));
9507 }
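/* Illustrative note (not part of the original source): the check is
   deliberately loose for pointers and integers, e.g.

     validate_arg (arg, POINTER_TYPE)   accepts any POINTER_TYPE_P type,
					including REFERENCE_TYPE;
     validate_arg (arg, INTEGER_TYPE)   accepts any INTEGRAL_TYPE_P type,
					e.g. BOOLEAN_TYPE or ENUMERAL_TYPE;

   any other code must match TREE_CODE (TREE_TYPE (arg)) exactly.  */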
9508
9509 /* This function validates the types of a function call argument list
9510 against a specified list of tree_codes. If the last specifier is a 0,
9511 that represents an ellipses, otherwise the last specifier must be a
9512    that represents an ellipsis, otherwise the last specifier must be a
9513
9514 This is the GIMPLE version of validate_arglist. Eventually we want to
9515 completely convert builtins.c to work from GIMPLEs and the tree based
9516 validate_arglist will then be removed. */
9517
9518 bool
9519 validate_gimple_arglist (const gcall *call, ...)
9520 {
9521 enum tree_code code;
9522   bool res = false;
9523 va_list ap;
9524 const_tree arg;
9525 size_t i;
9526
9527 va_start (ap, call);
9528 i = 0;
9529
9530 do
9531 {
9532 code = (enum tree_code) va_arg (ap, int);
9533 switch (code)
9534 {
9535 case 0:
9536 	  /* This signifies an ellipsis; any further arguments are all ok. */
9537 res = true;
9538 goto end;
9539 case VOID_TYPE:
9540 /* This signifies an endlink, if no arguments remain, return
9541 true, otherwise return false. */
9542 res = (i == gimple_call_num_args (call));
9543 goto end;
9544 default:
9545 /* If no parameters remain or the parameter's code does not
9546 match the specified code, return false. Otherwise continue
9547 checking any remaining arguments. */
9548 arg = gimple_call_arg (call, i++);
9549 if (!validate_arg (arg, code))
9550 goto end;
9551 break;
9552 }
9553 }
9554 while (1);
9555
9556 /* We need gotos here since we can only have one VA_CLOSE in a
9557 function. */
9558 end: ;
9559 va_end (ap);
9560
9561 return res;
9562 }
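/* Illustrative usage (not part of the original source; the argument
   pattern is hypothetical): a memchr-like call could be checked with

     validate_gimple_arglist (call, POINTER_TYPE, INTEGER_TYPE,
			      INTEGER_TYPE, VOID_TYPE)

   while passing a trailing 0 instead of VOID_TYPE would allow any
   number of extra arguments after the listed ones.  */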
9563
9564 /* Default target-specific builtin expander that does nothing. */
9565
9566 rtx
9567 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
9568 rtx target ATTRIBUTE_UNUSED,
9569 rtx subtarget ATTRIBUTE_UNUSED,
9570 machine_mode mode ATTRIBUTE_UNUSED,
9571 int ignore ATTRIBUTE_UNUSED)
9572 {
9573 return NULL_RTX;
9574 }
9575
9576 /* Returns true if EXP represents data that would potentially reside
9577 in a readonly section. */
9578
9579 bool
9580 readonly_data_expr (tree exp)
9581 {
9582 STRIP_NOPS (exp);
9583
9584 if (TREE_CODE (exp) != ADDR_EXPR)
9585 return false;
9586
9587 exp = get_base_address (TREE_OPERAND (exp, 0));
9588 if (!exp)
9589 return false;
9590
9591 /* Make sure we call decl_readonly_section only for trees it
9592 can handle (since it returns true for everything it doesn't
9593 understand). */
9594 if (TREE_CODE (exp) == STRING_CST
9595 || TREE_CODE (exp) == CONSTRUCTOR
9596 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
9597 return decl_readonly_section (exp, 0);
9598 else
9599 return false;
9600 }
9601
9602 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
9603 to the call, and TYPE is its return type.
9604
9605 Return NULL_TREE if no simplification was possible, otherwise return the
9606 simplified form of the call as a tree.
9607
9608 The simplified form may be a constant or other expression which
9609 computes the same value, but in a more efficient manner (including
9610 calls to other builtin functions).
9611
9612 The call may contain arguments which need to be evaluated, but
9613 which are not useful to determine the result of the call. In
9614 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9615 COMPOUND_EXPR will be an argument which must be evaluated.
9616 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9617 COMPOUND_EXPR in the chain will contain the tree for the simplified
9618 form of the builtin function call. */
9619
9620 static tree
9621 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
9622 {
9623 if (!validate_arg (s1, POINTER_TYPE)
9624 || !validate_arg (s2, POINTER_TYPE))
9625 return NULL_TREE;
9626 else
9627 {
9628 tree fn;
9629 const char *p1, *p2;
9630
9631 p2 = c_getstr (s2);
9632 if (p2 == NULL)
9633 return NULL_TREE;
9634
9635 p1 = c_getstr (s1);
9636 if (p1 != NULL)
9637 {
9638 const char *r = strstr (p1, p2);
9639 tree tem;
9640
9641 if (r == NULL)
9642 return build_int_cst (TREE_TYPE (s1), 0);
9643
9644 /* Return an offset into the constant string argument. */
9645 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
9646 return fold_convert_loc (loc, type, tem);
9647 }
9648
9649 /* The argument is const char *, and the result is char *, so we need
9650 a type conversion here to avoid a warning. */
9651 if (p2[0] == '\0')
9652 return fold_convert_loc (loc, type, s1);
9653
9654 if (p2[1] != '\0')
9655 return NULL_TREE;
9656
9657 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
9658 if (!fn)
9659 return NULL_TREE;
9660
9661 /* New argument list transforming strstr(s1, s2) to
9662 strchr(s1, s2[0]). */
9663 return build_call_expr_loc (loc, fn, 2, s1,
9664 build_int_cst (integer_type_node, p2[0]));
9665 }
9666 }
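/* Illustrative examples (not part of the original source) of the folds
   above:

     strstr (s, "")         -> (char *) s
     strstr ("hello", "ll") -> "hello" + 2   (offset into the constant)
     strstr (s, "l")        -> strchr (s, 'l')

   anything else is left for the library call.  */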
9667
9668 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
9669 the call, and TYPE is its return type.
9670
9671 Return NULL_TREE if no simplification was possible, otherwise return the
9672 simplified form of the call as a tree.
9673
9674 The simplified form may be a constant or other expression which
9675 computes the same value, but in a more efficient manner (including
9676 calls to other builtin functions).
9677
9678 The call may contain arguments which need to be evaluated, but
9679 which are not useful to determine the result of the call. In
9680 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9681 COMPOUND_EXPR will be an argument which must be evaluated.
9682 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9683 COMPOUND_EXPR in the chain will contain the tree for the simplified
9684 form of the builtin function call. */
9685
9686 static tree
9687 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
9688 {
9689 if (!validate_arg (s1, POINTER_TYPE)
9690 || !validate_arg (s2, INTEGER_TYPE))
9691 return NULL_TREE;
9692 else
9693 {
9694 const char *p1;
9695
9696 if (TREE_CODE (s2) != INTEGER_CST)
9697 return NULL_TREE;
9698
9699 p1 = c_getstr (s1);
9700 if (p1 != NULL)
9701 {
9702 char c;
9703 const char *r;
9704 tree tem;
9705
9706 if (target_char_cast (s2, &c))
9707 return NULL_TREE;
9708
9709 r = strchr (p1, c);
9710
9711 if (r == NULL)
9712 return build_int_cst (TREE_TYPE (s1), 0);
9713
9714 /* Return an offset into the constant string argument. */
9715 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
9716 return fold_convert_loc (loc, type, tem);
9717 }
9718 return NULL_TREE;
9719 }
9720 }
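/* Illustrative examples (not part of the original source): with both
   arguments constant the fold above yields an offset or a null pointer,
   e.g.

     strchr ("hello", 'l') -> "hello" + 2
     strchr ("hello", 'z') -> (char *) 0

   a non-constant string argument is never touched here.  */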
9721
9722 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
9723 the call, and TYPE is its return type.
9724
9725 Return NULL_TREE if no simplification was possible, otherwise return the
9726 simplified form of the call as a tree.
9727
9728 The simplified form may be a constant or other expression which
9729 computes the same value, but in a more efficient manner (including
9730 calls to other builtin functions).
9731
9732 The call may contain arguments which need to be evaluated, but
9733 which are not useful to determine the result of the call. In
9734 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9735 COMPOUND_EXPR will be an argument which must be evaluated.
9736 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9737 COMPOUND_EXPR in the chain will contain the tree for the simplified
9738 form of the builtin function call. */
9739
9740 static tree
9741 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
9742 {
9743 if (!validate_arg (s1, POINTER_TYPE)
9744 || !validate_arg (s2, INTEGER_TYPE))
9745 return NULL_TREE;
9746 else
9747 {
9748 tree fn;
9749 const char *p1;
9750
9751 if (TREE_CODE (s2) != INTEGER_CST)
9752 return NULL_TREE;
9753
9754 p1 = c_getstr (s1);
9755 if (p1 != NULL)
9756 {
9757 char c;
9758 const char *r;
9759 tree tem;
9760
9761 if (target_char_cast (s2, &c))
9762 return NULL_TREE;
9763
9764 r = strrchr (p1, c);
9765
9766 if (r == NULL)
9767 return build_int_cst (TREE_TYPE (s1), 0);
9768
9769 /* Return an offset into the constant string argument. */
9770 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
9771 return fold_convert_loc (loc, type, tem);
9772 }
9773
9774 if (! integer_zerop (s2))
9775 return NULL_TREE;
9776
9777 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
9778 if (!fn)
9779 return NULL_TREE;
9780
9781 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
9782 return build_call_expr_loc (loc, fn, 2, s1, s2);
9783 }
9784 }
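/* Illustrative examples (not part of the original source):

     strrchr ("hello", 'l') -> "hello" + 3   (last occurrence)
     strrchr (s, '\0')      -> strchr (s, '\0')

   the second fold is valid because the terminating NUL occurs exactly
   once, so first and last occurrence coincide.  */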
9785
9786 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
9787 to the call, and TYPE is its return type.
9788
9789 Return NULL_TREE if no simplification was possible, otherwise return the
9790 simplified form of the call as a tree.
9791
9792 The simplified form may be a constant or other expression which
9793 computes the same value, but in a more efficient manner (including
9794 calls to other builtin functions).
9795
9796 The call may contain arguments which need to be evaluated, but
9797 which are not useful to determine the result of the call. In
9798 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9799 COMPOUND_EXPR will be an argument which must be evaluated.
9800 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9801 COMPOUND_EXPR in the chain will contain the tree for the simplified
9802 form of the builtin function call. */
9803
9804 static tree
9805 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
9806 {
9807 if (!validate_arg (s1, POINTER_TYPE)
9808 || !validate_arg (s2, POINTER_TYPE))
9809 return NULL_TREE;
9810 else
9811 {
9812 tree fn;
9813 const char *p1, *p2;
9814
9815 p2 = c_getstr (s2);
9816 if (p2 == NULL)
9817 return NULL_TREE;
9818
9819 p1 = c_getstr (s1);
9820 if (p1 != NULL)
9821 {
9822 const char *r = strpbrk (p1, p2);
9823 tree tem;
9824
9825 if (r == NULL)
9826 return build_int_cst (TREE_TYPE (s1), 0);
9827
9828 /* Return an offset into the constant string argument. */
9829 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
9830 return fold_convert_loc (loc, type, tem);
9831 }
9832
9833 if (p2[0] == '\0')
9834 /* strpbrk(x, "") == NULL.
9835 Evaluate and ignore s1 in case it had side-effects. */
9836 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
9837
9838 if (p2[1] != '\0')
9839 return NULL_TREE; /* Really call strpbrk. */
9840
9841 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
9842 if (!fn)
9843 return NULL_TREE;
9844
9845 /* New argument list transforming strpbrk(s1, s2) to
9846 strchr(s1, s2[0]). */
9847 return build_call_expr_loc (loc, fn, 2, s1,
9848 build_int_cst (integer_type_node, p2[0]));
9849 }
9850 }
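/* Illustrative examples (not part of the original source):

     strpbrk ("hello", "lo") -> "hello" + 2
     strpbrk (s, "")         -> (char *) 0   (s still evaluated for
					      side-effects)
     strpbrk (s, "l")        -> strchr (s, 'l')  */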
9851
9852 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
9853 to the call.
9854
9855 Return NULL_TREE if no simplification was possible, otherwise return the
9856 simplified form of the call as a tree.
9857
9858 The simplified form may be a constant or other expression which
9859 computes the same value, but in a more efficient manner (including
9860 calls to other builtin functions).
9861
9862 The call may contain arguments which need to be evaluated, but
9863 which are not useful to determine the result of the call. In
9864 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9865 COMPOUND_EXPR will be an argument which must be evaluated.
9866 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9867 COMPOUND_EXPR in the chain will contain the tree for the simplified
9868 form of the builtin function call. */
9869
9870 static tree
9871 fold_builtin_strspn (location_t loc, tree s1, tree s2)
9872 {
9873 if (!validate_arg (s1, POINTER_TYPE)
9874 || !validate_arg (s2, POINTER_TYPE))
9875 return NULL_TREE;
9876 else
9877 {
9878 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
9879
9880 /* If both arguments are constants, evaluate at compile-time. */
9881 if (p1 && p2)
9882 {
9883 const size_t r = strspn (p1, p2);
9884 return build_int_cst (size_type_node, r);
9885 }
9886
9887 /* If either argument is "", return NULL_TREE. */
9888 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
9889 /* Evaluate and ignore both arguments in case either one has
9890 side-effects. */
9891 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
9892 s1, s2);
9893 return NULL_TREE;
9894 }
9895 }
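/* Illustrative examples (not part of the original source):

     strspn ("aabc", "ab") -> 3
     strspn (s, "")        -> 0   (both arguments still evaluated)
     strspn ("", s)        -> 0  */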
9896
9897 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
9898 to the call.
9899
9900 Return NULL_TREE if no simplification was possible, otherwise return the
9901 simplified form of the call as a tree.
9902
9903 The simplified form may be a constant or other expression which
9904 computes the same value, but in a more efficient manner (including
9905 calls to other builtin functions).
9906
9907 The call may contain arguments which need to be evaluated, but
9908 which are not useful to determine the result of the call. In
9909 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9910 COMPOUND_EXPR will be an argument which must be evaluated.
9911 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9912 COMPOUND_EXPR in the chain will contain the tree for the simplified
9913 form of the builtin function call. */
9914
9915 static tree
9916 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
9917 {
9918 if (!validate_arg (s1, POINTER_TYPE)
9919 || !validate_arg (s2, POINTER_TYPE))
9920 return NULL_TREE;
9921 else
9922 {
9923 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
9924
9925 /* If both arguments are constants, evaluate at compile-time. */
9926 if (p1 && p2)
9927 {
9928 const size_t r = strcspn (p1, p2);
9929 return build_int_cst (size_type_node, r);
9930 }
9931
9932 /* If the first argument is "", return NULL_TREE. */
9933 if (p1 && *p1 == '\0')
9934 {
9935 /* Evaluate and ignore argument s2 in case it has
9936 side-effects. */
9937 return omit_one_operand_loc (loc, size_type_node,
9938 size_zero_node, s2);
9939 }
9940
9941 /* If the second argument is "", return __builtin_strlen(s1). */
9942 if (p2 && *p2 == '\0')
9943 {
9944 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
9945
9946 /* If the replacement _DECL isn't initialized, don't do the
9947 transformation. */
9948 if (!fn)
9949 return NULL_TREE;
9950
9951 return build_call_expr_loc (loc, fn, 1, s1);
9952 }
9953 return NULL_TREE;
9954 }
9955 }
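/* Illustrative examples (not part of the original source):

     strcspn ("abc", "c") -> 2
     strcspn ("", s)      -> 0   (s still evaluated for side-effects)
     strcspn (s, "")      -> strlen (s)  */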
9956
9957 /* Fold the next_arg or va_start call EXP.  Returns true if an error was
9958    produced, false otherwise.  This is done so that we don't output the
9959    error or warning more than once.  */
9960
9961 bool
9962 fold_builtin_next_arg (tree exp, bool va_start_p)
9963 {
9964 tree fntype = TREE_TYPE (current_function_decl);
9965 int nargs = call_expr_nargs (exp);
9966 tree arg;
9967   /* There is a good chance the current input_location points inside the
9968      definition of the va_start macro (perhaps on the token for the
9969      builtin) in a system header, so warnings will not be emitted.
9970 Use the location in real source code. */
9971 source_location current_location =
9972 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
9973 NULL);
9974
9975 if (!stdarg_p (fntype))
9976 {
9977 error ("%<va_start%> used in function with fixed args");
9978 return true;
9979 }
9980
9981 if (va_start_p)
9982 {
9983       if (nargs != 2)
9984 {
9985 error ("wrong number of arguments to function %<va_start%>");
9986 return true;
9987 }
9988 arg = CALL_EXPR_ARG (exp, 1);
9989 }
9990   /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
9991      once we have checked the arguments and, if needed, issued a warning.  */
9992 else
9993 {
9994 if (nargs == 0)
9995 {
9996 /* Evidently an out of date version of <stdarg.h>; can't validate
9997 va_start's second argument, but can still work as intended. */
9998 warning_at (current_location,
9999 OPT_Wvarargs,
10000 "%<__builtin_next_arg%> called without an argument");
10001 return true;
10002 }
10003 else if (nargs > 1)
10004 {
10005 error ("wrong number of arguments to function %<__builtin_next_arg%>");
10006 return true;
10007 }
10008 arg = CALL_EXPR_ARG (exp, 0);
10009 }
10010
10011 if (TREE_CODE (arg) == SSA_NAME)
10012 arg = SSA_NAME_VAR (arg);
10013
10014 /* We destructively modify the call to be __builtin_va_start (ap, 0)
10015 or __builtin_next_arg (0) the first time we see it, after checking
10016 the arguments and if needed issuing a warning. */
10017 if (!integer_zerop (arg))
10018 {
10019 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
10020
10021 /* Strip off all nops for the sake of the comparison. This
10022 is not quite the same as STRIP_NOPS. It does more.
10023 We must also strip off INDIRECT_EXPR for C++ reference
10024 parameters. */
10025 while (CONVERT_EXPR_P (arg)
10026 || TREE_CODE (arg) == INDIRECT_REF)
10027 arg = TREE_OPERAND (arg, 0);
10028 if (arg != last_parm)
10029 {
10030 	  /* FIXME: Sometimes the tree optimizers hand us something other
10031 	     than the last argument even though the user did use the last
10032 	     argument.  We just warn and carry on, so wrong code may still
10033 	     be generated because of it.  */
10035 warning_at (current_location,
10036 OPT_Wvarargs,
10037 "second parameter of %<va_start%> not last named argument");
10038 }
10039
10040 /* Undefined by C99 7.15.1.4p4 (va_start):
10041 "If the parameter parmN is declared with the register storage
10042 class, with a function or array type, or with a type that is
10043 not compatible with the type that results after application of
10044 the default argument promotions, the behavior is undefined."
10045 */
10046 else if (DECL_REGISTER (arg))
10047 {
10048 warning_at (current_location,
10049 OPT_Wvarargs,
10050 "undefined behaviour when second parameter of "
10051 "%<va_start%> is declared with %<register%> storage");
10052 }
10053
10054 /* We want to verify the second parameter just once before the tree
10055 optimizers are run and then avoid keeping it in the tree,
10056 as otherwise we could warn even for correct code like:
10057 void foo (int i, ...)
10058 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
10059 if (va_start_p)
10060 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
10061 else
10062 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
10063 }
10064 return false;
10065 }
10066
10067
10068 /* Expand a call EXP to __builtin_object_size. */
10069
10070 static rtx
10071 expand_builtin_object_size (tree exp)
10072 {
10073 tree ost;
10074 int object_size_type;
10075 tree fndecl = get_callee_fndecl (exp);
10076
10077 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
10078 {
10079 error ("%Kfirst argument of %D must be a pointer, second integer constant",
10080 exp, fndecl);
10081 expand_builtin_trap ();
10082 return const0_rtx;
10083 }
10084
10085 ost = CALL_EXPR_ARG (exp, 1);
10086 STRIP_NOPS (ost);
10087
10088 if (TREE_CODE (ost) != INTEGER_CST
10089 || tree_int_cst_sgn (ost) < 0
10090 || compare_tree_int (ost, 3) > 0)
10091 {
10092 error ("%Klast argument of %D is not integer constant between 0 and 3",
10093 exp, fndecl);
10094 expand_builtin_trap ();
10095 return const0_rtx;
10096 }
10097
10098 object_size_type = tree_to_shwi (ost);
10099
10100 return object_size_type < 2 ? constm1_rtx : const0_rtx;
10101 }
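/* Illustrative note (not part of the original source): when the size
   could not be folded earlier, the builtin's documented defaults are
   expanded here, e.g.

     __builtin_object_size (p, 0) -> (size_t) -1   (maximum estimate)
     __builtin_object_size (p, 2) -> (size_t) 0    (minimum estimate)

   which is exactly the constm1_rtx / const0_rtx distinction above.  */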
10102
10103 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
10104 FCODE is the BUILT_IN_* to use.
10105 Return NULL_RTX if we failed; the caller should emit a normal call,
10106 otherwise try to get the result in TARGET, if convenient (and in
10107 mode MODE if that's convenient). */
10108
10109 static rtx
10110 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
10111 enum built_in_function fcode)
10112 {
10113 tree dest, src, len, size;
10114
10115 if (!validate_arglist (exp,
10116 POINTER_TYPE,
10117 fcode == BUILT_IN_MEMSET_CHK
10118 ? INTEGER_TYPE : POINTER_TYPE,
10119 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
10120 return NULL_RTX;
10121
10122 dest = CALL_EXPR_ARG (exp, 0);
10123 src = CALL_EXPR_ARG (exp, 1);
10124 len = CALL_EXPR_ARG (exp, 2);
10125 size = CALL_EXPR_ARG (exp, 3);
10126
10127 if (! tree_fits_uhwi_p (size))
10128 return NULL_RTX;
10129
10130 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
10131 {
10132 tree fn;
10133
10134 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
10135 {
10136 warning_at (tree_nonartificial_location (exp),
10137 0, "%Kcall to %D will always overflow destination buffer",
10138 exp, get_callee_fndecl (exp));
10139 return NULL_RTX;
10140 }
10141
10142 fn = NULL_TREE;
10143 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
10144 mem{cpy,pcpy,move,set} is available. */
10145 switch (fcode)
10146 {
10147 case BUILT_IN_MEMCPY_CHK:
10148 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
10149 break;
10150 case BUILT_IN_MEMPCPY_CHK:
10151 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
10152 break;
10153 case BUILT_IN_MEMMOVE_CHK:
10154 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
10155 break;
10156 case BUILT_IN_MEMSET_CHK:
10157 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
10158 break;
10159 default:
10160 break;
10161 }
10162
10163 if (! fn)
10164 return NULL_RTX;
10165
10166 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
10167 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10168 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10169 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10170 }
10171 else if (fcode == BUILT_IN_MEMSET_CHK)
10172 return NULL_RTX;
10173 else
10174 {
10175 unsigned int dest_align = get_pointer_alignment (dest);
10176
10177 /* If DEST is not a pointer type, call the normal function. */
10178 if (dest_align == 0)
10179 return NULL_RTX;
10180
10181 /* If SRC and DEST are the same (and not volatile), do nothing. */
10182 if (operand_equal_p (src, dest, 0))
10183 {
10184 tree expr;
10185
10186 if (fcode != BUILT_IN_MEMPCPY_CHK)
10187 {
10188 /* Evaluate and ignore LEN in case it has side-effects. */
10189 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
10190 return expand_expr (dest, target, mode, EXPAND_NORMAL);
10191 }
10192
10193 expr = fold_build_pointer_plus (dest, len);
10194 return expand_expr (expr, target, mode, EXPAND_NORMAL);
10195 }
10196
10197 /* __memmove_chk special case. */
10198 if (fcode == BUILT_IN_MEMMOVE_CHK)
10199 {
10200 unsigned int src_align = get_pointer_alignment (src);
10201
10202 if (src_align == 0)
10203 return NULL_RTX;
10204
10205 /* If src is categorized for a readonly section we can use
10206 normal __memcpy_chk. */
10207 if (readonly_data_expr (src))
10208 {
10209 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
10210 if (!fn)
10211 return NULL_RTX;
10212 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
10213 dest, src, len, size);
10214 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10215 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10216 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10217 }
10218 }
10219 return NULL_RTX;
10220 }
10221 }
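/* Illustrative examples (not part of the original source) of the
   rewrites above, with OS the known object size argument:

     __memcpy_chk (d, s, n, OS)   constant n <= OS  -> memcpy (d, s, n)
     __memcpy_chk (d, s, n, OS)   constant n >  OS  -> warn, keep the call
     __mempcpy_chk (d, d, n, OS)  non-constant n    -> d + n

   the runtime check survives only in the cases returned as NULL_RTX.  */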
10222
10223 /* Emit warning if a buffer overflow is detected at compile time. */
10224
10225 static void
10226 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
10227 {
10228 int is_strlen = 0;
10229 tree len, size;
10230 location_t loc = tree_nonartificial_location (exp);
10231
10232 switch (fcode)
10233 {
10234 case BUILT_IN_STRCPY_CHK:
10235 case BUILT_IN_STPCPY_CHK:
10236 /* For __strcat_chk the warning will be emitted only if overflowing
10237 by at least strlen (dest) + 1 bytes. */
10238 case BUILT_IN_STRCAT_CHK:
10239 len = CALL_EXPR_ARG (exp, 1);
10240 size = CALL_EXPR_ARG (exp, 2);
10241 is_strlen = 1;
10242 break;
10243 case BUILT_IN_STRNCAT_CHK:
10244 case BUILT_IN_STRNCPY_CHK:
10245 case BUILT_IN_STPNCPY_CHK:
10246 len = CALL_EXPR_ARG (exp, 2);
10247 size = CALL_EXPR_ARG (exp, 3);
10248 break;
10249 case BUILT_IN_SNPRINTF_CHK:
10250 case BUILT_IN_VSNPRINTF_CHK:
10251 len = CALL_EXPR_ARG (exp, 1);
10252 size = CALL_EXPR_ARG (exp, 3);
10253 break;
10254 default:
10255 gcc_unreachable ();
10256 }
10257
10258 if (!len || !size)
10259 return;
10260
10261 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
10262 return;
10263
10264 if (is_strlen)
10265 {
10266 len = c_strlen (len, 1);
10267 if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
10268 return;
10269 }
10270 else if (fcode == BUILT_IN_STRNCAT_CHK)
10271 {
10272 tree src = CALL_EXPR_ARG (exp, 1);
10273 if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
10274 return;
10275 src = c_strlen (src, 1);
10276 if (! src || ! tree_fits_uhwi_p (src))
10277 {
10278 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
10279 exp, get_callee_fndecl (exp));
10280 return;
10281 }
10282 else if (tree_int_cst_lt (src, size))
10283 return;
10284 }
10285 else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
10286 return;
10287
10288 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
10289 exp, get_callee_fndecl (exp));
10290 }
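/* Illustrative example (not part of the original source) of a diagnosed
   call, with a destination known to hold 4 bytes:

     char buf[4];
     __builtin___strcpy_chk (buf, "hello",
			     __builtin_object_size (buf, 0));

   here strlen ("hello") == 5 is not less than 4, so the "will always
   overflow destination buffer" warning above fires at compile time.  */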
10291
10292 /* Emit warning if a buffer overflow is detected at compile time
10293 in __sprintf_chk/__vsprintf_chk calls. */
10294
10295 static void
10296 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
10297 {
10298 tree size, len, fmt;
10299 const char *fmt_str;
10300 int nargs = call_expr_nargs (exp);
10301
10302 /* Verify the required arguments in the original call. */
10303
10304 if (nargs < 4)
10305 return;
10306 size = CALL_EXPR_ARG (exp, 2);
10307 fmt = CALL_EXPR_ARG (exp, 3);
10308
10309 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
10310 return;
10311
10312 /* Check whether the format is a literal string constant. */
10313 fmt_str = c_getstr (fmt);
10314 if (fmt_str == NULL)
10315 return;
10316
10317 if (!init_target_chars ())
10318 return;
10319
10320 /* If the format doesn't contain % args or %%, we know its size. */
10321 if (strchr (fmt_str, target_percent) == 0)
10322 len = build_int_cstu (size_type_node, strlen (fmt_str));
10323 /* If the format is "%s" and first ... argument is a string literal,
10324 we know it too. */
10325 else if (fcode == BUILT_IN_SPRINTF_CHK
10326 && strcmp (fmt_str, target_percent_s) == 0)
10327 {
10328 tree arg;
10329
10330 if (nargs < 5)
10331 return;
10332 arg = CALL_EXPR_ARG (exp, 4);
10333 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
10334 return;
10335
10336 len = c_strlen (arg, 1);
10337 if (!len || ! tree_fits_uhwi_p (len))
10338 return;
10339 }
10340 else
10341 return;
10342
10343 if (! tree_int_cst_lt (len, size))
10344 warning_at (tree_nonartificial_location (exp),
10345 0, "%Kcall to %D will always overflow destination buffer",
10346 exp, get_callee_fndecl (exp));
10347 }
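/* Illustrative example (not part of the original source): with a format
   string containing no '%' the output length is known exactly, so

     __builtin___sprintf_chk (buf, 0, 4, "hello");

   has len == 5 >= size == 4 and triggers the overflow warning above.  */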
10348
10349 /* Emit warning if a free is called with address of a variable. */
10350
10351 static void
10352 maybe_emit_free_warning (tree exp)
10353 {
10354 tree arg = CALL_EXPR_ARG (exp, 0);
10355
10356 STRIP_NOPS (arg);
10357 if (TREE_CODE (arg) != ADDR_EXPR)
10358 return;
10359
10360 arg = get_base_address (TREE_OPERAND (arg, 0));
10361 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
10362 return;
10363
10364 if (SSA_VAR_P (arg))
10365 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10366 "%Kattempt to free a non-heap object %qD", exp, arg);
10367 else
10368 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10369 "%Kattempt to free a non-heap object", exp);
10370 }
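/* Illustrative example (not part of the original source):

     int x;
     free (&x);     -> "attempt to free a non-heap object 'x'"

   only the address of a declaration is diagnosed here; freeing a pointer
   obtained through indirection or arithmetic is left alone.  */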
10371
10372 /* Fold a call to __builtin_object_size with arguments PTR and OST,
10373 if possible. */
10374
10375 static tree
10376 fold_builtin_object_size (tree ptr, tree ost)
10377 {
10378 unsigned HOST_WIDE_INT bytes;
10379 int object_size_type;
10380
10381 if (!validate_arg (ptr, POINTER_TYPE)
10382 || !validate_arg (ost, INTEGER_TYPE))
10383 return NULL_TREE;
10384
10385 STRIP_NOPS (ost);
10386
10387 if (TREE_CODE (ost) != INTEGER_CST
10388 || tree_int_cst_sgn (ost) < 0
10389 || compare_tree_int (ost, 3) > 0)
10390 return NULL_TREE;
10391
10392 object_size_type = tree_to_shwi (ost);
10393
10394 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
10395 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
10396 and (size_t) 0 for types 2 and 3. */
10397 if (TREE_SIDE_EFFECTS (ptr))
10398 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
10399
10400 if (TREE_CODE (ptr) == ADDR_EXPR)
10401 {
10402 bytes = compute_builtin_object_size (ptr, object_size_type);
10403 if (wi::fits_to_tree_p (bytes, size_type_node))
10404 return build_int_cstu (size_type_node, bytes);
10405 }
10406 else if (TREE_CODE (ptr) == SSA_NAME)
10407 {
10408 /* If object size is not known yet, delay folding until
10409 	 later.  Maybe subsequent passes will help determine
10410 it. */
10411 bytes = compute_builtin_object_size (ptr, object_size_type);
10412 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
10413 && wi::fits_to_tree_p (bytes, size_type_node))
10414 return build_int_cstu (size_type_node, bytes);
10415 }
10416
10417 return NULL_TREE;
10418 }
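
/* For example (hypothetical values): given

     char buf[64];
     size_t n = __builtin_object_size (&buf[8], 0);

   PTR is an ADDR_EXPR, compute_builtin_object_size returns 56, and
   the call folds to (size_t) 56.  An OST outside [0, 3] leaves the
   call unfolded (NULL_TREE).  */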
10419
10420 /* Builtins with folding operations that operate on "..." arguments
10421 need special handling; we need to store the arguments in a convenient
10422 data structure before attempting any folding. Fortunately there are
10423 only a few builtins that fall into this category. FNDECL is the
10424 function, EXP is the CALL_EXPR for the call. */
10425
10426 static tree
10427 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
10428 {
10429 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10430 tree ret = NULL_TREE;
10431
10432 switch (fcode)
10433 {
10434 case BUILT_IN_FPCLASSIFY:
10435 ret = fold_builtin_fpclassify (loc, args, nargs);
10436 break;
10437
10438 default:
10439 break;
10440 }
10441 if (ret)
10442 {
10443 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10444 SET_EXPR_LOCATION (ret, loc);
10445 TREE_NO_WARNING (ret) = 1;
10446 return ret;
10447 }
10448 return NULL_TREE;
10449 }
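
/* For example, fpclassify is the only varargs case handled above:

     __builtin_fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL,
			   FP_SUBNORMAL, FP_ZERO, x)

   arrives here with six entries in ARGS, and fold_builtin_fpclassify
   can select one of the first five constants when X is a REAL_CST.
   (The FP_* values are supplied by the caller's <math.h>.)  */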
10450
10451 /* Initialize format string characters in the target charset. */
10452
10453 bool
10454 init_target_chars (void)
10455 {
10456 static bool init;
10457 if (!init)
10458 {
10459 target_newline = lang_hooks.to_target_charset ('\n');
10460 target_percent = lang_hooks.to_target_charset ('%');
10461 target_c = lang_hooks.to_target_charset ('c');
10462 target_s = lang_hooks.to_target_charset ('s');
10463 if (target_newline == 0 || target_percent == 0 || target_c == 0
10464 || target_s == 0)
10465 return false;
10466
10467 target_percent_c[0] = target_percent;
10468 target_percent_c[1] = target_c;
10469 target_percent_c[2] = '\0';
10470
10471 target_percent_s[0] = target_percent;
10472 target_percent_s[1] = target_s;
10473 target_percent_s[2] = '\0';
10474
10475 target_percent_s_newline[0] = target_percent;
10476 target_percent_s_newline[1] = target_s;
10477 target_percent_s_newline[2] = target_newline;
10478 target_percent_s_newline[3] = '\0';
10479
10480 init = true;
10481 }
10482 return true;
10483 }
10484
10485 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
10486 and no overflow/underflow occurred. INEXACT is true if M was not
10487 exactly calculated. TYPE is the tree type for the result. This
10488 function assumes that you cleared the MPFR flags immediately before
10489 calculating M, so that any exception flag set during that calculation
10490 is still visible here. Return NULL_TREE if any checks fail. */
10491
10492 static tree
10493 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
10494 {
10495 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10496 overflow/underflow occurred. If -frounding-math, proceed iff the
10497 result of calling FUNC was exact. */
10498 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
10499 && (!flag_rounding_math || !inexact))
10500 {
10501 REAL_VALUE_TYPE rr;
10502
10503 real_from_mpfr (&rr, m, type, GMP_RNDN);
10504 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value;
10505 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10506 but the mpfr_t is not, then we underflowed in the
10507 conversion. */
10508 if (real_isfinite (&rr)
10509 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
10510 {
10511 REAL_VALUE_TYPE rmode;
10512
10513 real_convert (&rmode, TYPE_MODE (type), &rr);
10514 /* Proceed iff the specified mode can hold the value. */
10515 if (real_identical (&rmode, &rr))
10516 return build_real (type, rmode);
10517 }
10518 }
10519 return NULL_TREE;
10520 }
10521
10522 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
10523 number and no overflow/underflow occurred. INEXACT is true if M
10524 was not exactly calculated. TYPE is the tree type for the result.
10525 This function assumes that you cleared the MPFR flags immediately
10526 before calculating M, so that any exception flag set during that
10527 calculation is still visible here. Return NULL_TREE if any checks
10528 fail; if FORCE_CONVERT is true, bypass the checks. */
10529
10530 static tree
10531 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
10532 {
10533 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10534 overflow/underflow occurred. If -frounding-math, proceed iff the
10535 result of calling FUNC was exact. */
10536 if (force_convert
10537 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
10538 && !mpfr_overflow_p () && !mpfr_underflow_p ()
10539 && (!flag_rounding_math || !inexact)))
10540 {
10541 REAL_VALUE_TYPE re, im;
10542
10543 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
10544 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
10545 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values;
10546 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10547 but the mpfr_t is not, then we underflowed in the
10548 conversion. */
10549 if (force_convert
10550 || (real_isfinite (&re) && real_isfinite (&im)
10551 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
10552 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
10553 {
10554 REAL_VALUE_TYPE re_mode, im_mode;
10555
10556 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
10557 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
10558 /* Proceed iff the specified mode can hold the value. */
10559 if (force_convert
10560 || (real_identical (&re_mode, &re)
10561 && real_identical (&im_mode, &im)))
10562 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
10563 build_real (TREE_TYPE (type), im_mode));
10564 }
10565 }
10566 return NULL_TREE;
10567 }
10568
10569 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
10570 FUNC on it and return the resulting value as a tree with type TYPE.
10571 If MIN and/or MAX are not NULL, then the supplied ARG must be
10572 within those bounds. If INCLUSIVE is true, then MIN/MAX are
10573 acceptable values, otherwise they are not. The mpfr precision is
10574 set to the precision of TYPE. We assume that function FUNC returns
10575 zero if the result could be calculated exactly within the requested
10576 precision. */
10577
10578 static tree
10579 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
10580 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
10581 bool inclusive)
10582 {
10583 tree result = NULL_TREE;
10584
10585 STRIP_NOPS (arg);
10586
10587 /* To proceed, MPFR must exactly represent the target floating point
10588 format, which only happens when the target base equals two. */
10589 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10590 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
10591 {
10592 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
10593
10594 if (real_isfinite (ra)
10595 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
10596 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
10597 {
10598 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10599 const int prec = fmt->p;
10600 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
10601 int inexact;
10602 mpfr_t m;
10603
10604 mpfr_init2 (m, prec);
10605 mpfr_from_real (m, ra, GMP_RNDN);
10606 mpfr_clear_flags ();
10607 inexact = func (m, m, rnd);
10608 result = do_mpfr_ckconv (m, type, inexact);
10609 mpfr_clear (m);
10610 }
10611 }
10612
10613 return result;
10614 }
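
/* A sketch of a typical use (such folds are gradually moving to
   match.pd): constant folding sqrt restricts ARG to [+0, +Inf]
   inclusively:

     result = do_mpfr_arg1 (arg, type, mpfr_sqrt,
			    &dconst0, NULL, /*inclusive=*/true);

   mpfr_sqrt returns nonzero when rounding occurred, which
   do_mpfr_ckconv then rejects under -frounding-math.  */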
10615
10616 /* If arguments ARG1 and ARG2 are REAL_CSTs, call the two-argument mpfr
10617 function FUNC on them and return the resulting value as a tree with
10618 type TYPE. The mpfr precision is set to the precision of TYPE. We
10619 assume that function FUNC returns zero if the result could be
10620 calculated exactly within the requested precision. */
10621
10622 static tree
10623 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
10624 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
10625 {
10626 tree result = NULL_TREE;
10627
10628 STRIP_NOPS (arg1);
10629 STRIP_NOPS (arg2);
10630
10631 /* To proceed, MPFR must exactly represent the target floating point
10632 format, which only happens when the target base equals two. */
10633 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10634 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
10635 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
10636 {
10637 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
10638 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
10639
10640 if (real_isfinite (ra1) && real_isfinite (ra2))
10641 {
10642 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10643 const int prec = fmt->p;
10644 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
10645 int inexact;
10646 mpfr_t m1, m2;
10647
10648 mpfr_inits2 (prec, m1, m2, NULL);
10649 mpfr_from_real (m1, ra1, GMP_RNDN);
10650 mpfr_from_real (m2, ra2, GMP_RNDN);
10651 mpfr_clear_flags ();
10652 inexact = func (m1, m1, m2, rnd);
10653 result = do_mpfr_ckconv (m1, type, inexact);
10654 mpfr_clears (m1, m2, NULL);
10655 }
10656 }
10657
10658 return result;
10659 }
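
/* A sketch of a typical use: folding pow on two REAL_CST operands,

     result = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow);

   where mpfr_pow has exactly the required
   (mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t) shape.  */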
10660
10661 /* If arguments ARG1, ARG2 and ARG3 are REAL_CSTs, call the three-argument
10662 mpfr function FUNC on them and return the resulting value as a tree with
10663 type TYPE. The mpfr precision is set to the precision of TYPE. We assume
10664 that function FUNC returns zero if the result could be calculated
10665 exactly within the requested precision. */
10666
10667 static tree
10668 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
10669 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
10670 {
10671 tree result = NULL_TREE;
10672
10673 STRIP_NOPS (arg1);
10674 STRIP_NOPS (arg2);
10675 STRIP_NOPS (arg3);
10676
10677 /* To proceed, MPFR must exactly represent the target floating point
10678 format, which only happens when the target base equals two. */
10679 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10680 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
10681 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
10682 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
10683 {
10684 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
10685 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
10686 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
10687
10688 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
10689 {
10690 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10691 const int prec = fmt->p;
10692 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
10693 int inexact;
10694 mpfr_t m1, m2, m3;
10695
10696 mpfr_inits2 (prec, m1, m2, m3, NULL);
10697 mpfr_from_real (m1, ra1, GMP_RNDN);
10698 mpfr_from_real (m2, ra2, GMP_RNDN);
10699 mpfr_from_real (m3, ra3, GMP_RNDN);
10700 mpfr_clear_flags ();
10701 inexact = func (m1, m1, m2, m3, rnd);
10702 result = do_mpfr_ckconv (m1, type, inexact);
10703 mpfr_clears (m1, m2, m3, NULL);
10704 }
10705 }
10706
10707 return result;
10708 }
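
/* A sketch of a typical use: folding fma on three REAL_CST operands,

     result = do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);

   which computes arg0 * arg1 + arg2 in a single correctly rounded
   step, matching the fma builtin's semantics.  */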
10709
10710 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
10711 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
10712 If ARG_SINP and ARG_COSP are NULL then the result is returned
10713 as a complex value.
10714 The type is taken from the type of ARG and is used for setting the
10715 precision of the calculation and results. */
10716
10717 static tree
10718 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
10719 {
10720 tree const type = TREE_TYPE (arg);
10721 tree result = NULL_TREE;
10722
10723 STRIP_NOPS (arg);
10724
10725 /* To proceed, MPFR must exactly represent the target floating point
10726 format, which only happens when the target base equals two. */
10727 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10728 && TREE_CODE (arg) == REAL_CST
10729 && !TREE_OVERFLOW (arg))
10730 {
10731 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
10732
10733 if (real_isfinite (ra))
10734 {
10735 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10736 const int prec = fmt->p;
10737 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
10738 tree result_s, result_c;
10739 int inexact;
10740 mpfr_t m, ms, mc;
10741
10742 mpfr_inits2 (prec, m, ms, mc, NULL);
10743 mpfr_from_real (m, ra, GMP_RNDN);
10744 mpfr_clear_flags ();
10745 inexact = mpfr_sin_cos (ms, mc, m, rnd);
10746 result_s = do_mpfr_ckconv (ms, type, inexact);
10747 result_c = do_mpfr_ckconv (mc, type, inexact);
10748 mpfr_clears (m, ms, mc, NULL);
10749 if (result_s && result_c)
10750 {
10751 	      /* If we are to return the result as a complex value, do so. */
10752 if (!arg_sinp && !arg_cosp)
10753 return build_complex (build_complex_type (type),
10754 result_c, result_s);
10755
10756 /* Dereference the sin/cos pointer arguments. */
10757 arg_sinp = build_fold_indirect_ref (arg_sinp);
10758 arg_cosp = build_fold_indirect_ref (arg_cosp);
10759 	      /* Proceed iff valid pointer types were passed in. */
10760 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
10761 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
10762 {
10763 /* Set the values. */
10764 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
10765 result_s);
10766 TREE_SIDE_EFFECTS (result_s) = 1;
10767 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
10768 result_c);
10769 TREE_SIDE_EFFECTS (result_c) = 1;
10770 /* Combine the assignments into a compound expr. */
10771 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10772 result_s, result_c));
10773 }
10774 }
10775 }
10776 }
10777 return result;
10778 }
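
/* Two sketches of typical uses: folding sincos, which stores through
   its two pointer arguments,

     result = do_mpfr_sincos (arg0, arg_sinp, arg_cosp);

   and folding cexpi, which wants cos + i*sin back as a complex value:

     result = do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);  */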
10779
10780 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
10781 two-argument mpfr order N Bessel function FUNC on them and return
10782 the resulting value as a tree with type TYPE. The mpfr precision
10783 is set to the precision of TYPE. We assume that function FUNC
10784 returns zero if the result could be calculated exactly within the
10785 requested precision. */
10786 static tree
10787 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
10788 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
10789 const REAL_VALUE_TYPE *min, bool inclusive)
10790 {
10791 tree result = NULL_TREE;
10792
10793 STRIP_NOPS (arg1);
10794 STRIP_NOPS (arg2);
10795
10796 /* To proceed, MPFR must exactly represent the target floating point
10797 format, which only happens when the target base equals two. */
10798 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10799 && tree_fits_shwi_p (arg1)
10800 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
10801 {
10802 const HOST_WIDE_INT n = tree_to_shwi (arg1);
10803 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
10804
10805 if (n == (long)n
10806 && real_isfinite (ra)
10807 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
10808 {
10809 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10810 const int prec = fmt->p;
10811 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
10812 int inexact;
10813 mpfr_t m;
10814
10815 mpfr_init2 (m, prec);
10816 mpfr_from_real (m, ra, GMP_RNDN);
10817 mpfr_clear_flags ();
10818 inexact = func (m, n, m, rnd);
10819 result = do_mpfr_ckconv (m, type, inexact);
10820 mpfr_clear (m);
10821 }
10822 }
10823
10824 return result;
10825 }
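
/* Sketches of typical uses: jn is defined for all finite arguments,

     result = do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, false);

   while yn needs a strictly positive argument, expressed through MIN:

     result = do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
				&dconst0, false);  */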
10826
10827 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
10828 the pointer *(ARG_QUO) and return the result. The type is taken
10829 from the type of ARG0 and is used for setting the precision of the
10830 calculation and results. */
10831
10832 static tree
10833 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
10834 {
10835 tree const type = TREE_TYPE (arg0);
10836 tree result = NULL_TREE;
10837
10838 STRIP_NOPS (arg0);
10839 STRIP_NOPS (arg1);
10840
10841 /* To proceed, MPFR must exactly represent the target floating point
10842 format, which only happens when the target base equals two. */
10843 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10844 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
10845 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
10846 {
10847 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
10848 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
10849
10850 if (real_isfinite (ra0) && real_isfinite (ra1))
10851 {
10852 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10853 const int prec = fmt->p;
10854 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
10855 tree result_rem;
10856 long integer_quo;
10857 mpfr_t m0, m1;
10858
10859 mpfr_inits2 (prec, m0, m1, NULL);
10860 mpfr_from_real (m0, ra0, GMP_RNDN);
10861 mpfr_from_real (m1, ra1, GMP_RNDN);
10862 mpfr_clear_flags ();
10863 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
10864 /* Remquo is independent of the rounding mode, so pass
10865 inexact=0 to do_mpfr_ckconv(). */
10866 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
10867 mpfr_clears (m0, m1, NULL);
10868 if (result_rem)
10869 {
10870 /* MPFR calculates quo in the host's long so it may
10871 return more bits in quo than the target int can hold
10872 if sizeof(host long) > sizeof(target int). This can
10873 happen even for native compilers in LP64 mode. In
10874 		 these cases, reduce the quo value modulo the largest
10875 		 number that the target int can hold while leaving one
10876 		 bit for the sign. */
10877 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
10878 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
10879
10880 /* Dereference the quo pointer argument. */
10881 arg_quo = build_fold_indirect_ref (arg_quo);
10882 /* Proceed iff a valid pointer type was passed in. */
10883 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
10884 {
10885 /* Set the value. */
10886 tree result_quo
10887 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
10888 build_int_cst (TREE_TYPE (arg_quo),
10889 integer_quo));
10890 TREE_SIDE_EFFECTS (result_quo) = 1;
10891 /* Combine the quo assignment with the rem. */
10892 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10893 result_quo, result_rem));
10894 }
10895 }
10896 }
10897 }
10898 return result;
10899 }
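
/* Illustrative only: for remquo (x, y, &q) with constant X and Y,

     result = do_mpfr_remquo (arg0, arg1, arg2);

   returns a COMPOUND_EXPR that first assigns the quotient bits to
   *arg2 and then yields the remainder, so it can replace the whole
   call.  */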
10900
10901 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
10902 resulting value as a tree with type TYPE. The mpfr precision is
10903 set to the precision of TYPE. We assume that this mpfr function
10904 returns zero if the result could be calculated exactly within the
10905 requested precision. In addition, the integer pointer represented
10906 by ARG_SG will be dereferenced and set to the appropriate signgam
10907 (-1,1) value. */
10908
10909 static tree
10910 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
10911 {
10912 tree result = NULL_TREE;
10913
10914 STRIP_NOPS (arg);
10915
10916 /* To proceed, MPFR must exactly represent the target floating point
10917 format, which only happens when the target base equals two. Also
10918 verify ARG is a constant and that ARG_SG is an int pointer. */
10919 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10920 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
10921 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
10922 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
10923 {
10924 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
10925
10926 /* In addition to NaN and Inf, the argument cannot be zero or a
10927 negative integer. */
10928 if (real_isfinite (ra)
10929 && ra->cl != rvc_zero
10930 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
10931 {
10932 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10933 const int prec = fmt->p;
10934 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
10935 int inexact, sg;
10936 mpfr_t m;
10937 tree result_lg;
10938
10939 mpfr_init2 (m, prec);
10940 mpfr_from_real (m, ra, GMP_RNDN);
10941 mpfr_clear_flags ();
10942 inexact = mpfr_lgamma (m, &sg, m, rnd);
10943 result_lg = do_mpfr_ckconv (m, type, inexact);
10944 mpfr_clear (m);
10945 if (result_lg)
10946 {
10947 tree result_sg;
10948
10949 /* Dereference the arg_sg pointer argument. */
10950 arg_sg = build_fold_indirect_ref (arg_sg);
10951 /* Assign the signgam value into *arg_sg. */
10952 result_sg = fold_build2 (MODIFY_EXPR,
10953 TREE_TYPE (arg_sg), arg_sg,
10954 build_int_cst (TREE_TYPE (arg_sg), sg));
10955 TREE_SIDE_EFFECTS (result_sg) = 1;
10956 /* Combine the signgam assignment with the lgamma result. */
10957 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10958 result_sg, result_lg));
10959 }
10960 }
10961 }
10962
10963 return result;
10964 }
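
/* Illustrative only: for lgamma_r (x, &sg) with a constant X,

     result = do_mpfr_lgamma_r (arg0, arg1, type);

   returns a COMPOUND_EXPR storing the sign of gamma(x) into *arg1
   before yielding the REAL_CST result.  */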
10965
10966 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
10967 function FUNC on it and return the resulting value as a tree with
10968 type TYPE. The mpfr precision is set to the precision of TYPE. We
10969 assume that function FUNC returns zero if the result could be
10970 calculated exactly within the requested precision. */
10971
10972 static tree
10973 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
10974 {
10975 tree result = NULL_TREE;
10976
10977 STRIP_NOPS (arg);
10978
10979 /* To proceed, MPFR must exactly represent the target floating point
10980 format, which only happens when the target base equals two. */
10981 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
10982 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
10983 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
10984 {
10985 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
10986 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
10987
10988 if (real_isfinite (re) && real_isfinite (im))
10989 {
10990 const struct real_format *const fmt =
10991 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
10992 const int prec = fmt->p;
10993 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
10994 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
10995 int inexact;
10996 mpc_t m;
10997
10998 mpc_init2 (m, prec);
10999 mpfr_from_real (mpc_realref (m), re, rnd);
11000 mpfr_from_real (mpc_imagref (m), im, rnd);
11001 mpfr_clear_flags ();
11002 inexact = func (m, m, crnd);
11003 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
11004 mpc_clear (m);
11005 }
11006 }
11007
11008 return result;
11009 }
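
/* A sketch of a typical use: folding csqrt of a COMPLEX_CST operand,

     result = do_mpc_arg1 (arg0, type, mpc_sqrt);

   where mpc_sqrt has exactly the required
   (mpc_ptr, mpc_srcptr, mpc_rnd_t) shape; any unary mpc entry point
   fits.  */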
11010
11011 /* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
11012 mpc function FUNC on them and return the resulting value as a tree
11013 with type TYPE. The mpfr precision is set to the precision of
11014 TYPE. We assume that function FUNC returns zero if the result
11015 could be calculated exactly within the requested precision. If
11016 DO_NONFINITE is true, then fold expressions containing Inf or NaN
11017 in the arguments and/or results. */
11018
11019 tree
11020 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
11021 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
11022 {
11023 tree result = NULL_TREE;
11024
11025 STRIP_NOPS (arg0);
11026 STRIP_NOPS (arg1);
11027
11028 /* To proceed, MPFR must exactly represent the target floating point
11029 format, which only happens when the target base equals two. */
11030 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
11031 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
11032 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
11033 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
11034 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
11035 {
11036 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
11037 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
11038 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
11039 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
11040
11041 if (do_nonfinite
11042 || (real_isfinite (re0) && real_isfinite (im0)
11043 && real_isfinite (re1) && real_isfinite (im1)))
11044 {
11045 const struct real_format *const fmt =
11046 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
11047 const int prec = fmt->p;
11048 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
11049 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
11050 int inexact;
11051 mpc_t m0, m1;
11052
11053 mpc_init2 (m0, prec);
11054 mpc_init2 (m1, prec);
11055 mpfr_from_real (mpc_realref (m0), re0, rnd);
11056 mpfr_from_real (mpc_imagref (m0), im0, rnd);
11057 mpfr_from_real (mpc_realref (m1), re1, rnd);
11058 mpfr_from_real (mpc_imagref (m1), im1, rnd);
11059 mpfr_clear_flags ();
11060 inexact = func (m0, m0, m1, crnd);
11061 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
11062 mpc_clear (m0);
11063 mpc_clear (m1);
11064 }
11065 }
11066
11067 return result;
11068 }
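
/* A sketch of a typical use: folding cpow on two COMPLEX_CST operands,

     result = do_mpc_arg2 (arg0, arg1, type, do_nonfinite_p, mpc_pow);

   where do_nonfinite_p stands for whatever flag the caller consults
   (a placeholder here) to decide whether Inf/NaN operands may be
   folded through.  */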
11069
11070 /* A wrapper function for builtin folding that prevents warnings for
11071 "statement without effect" and the like, caused by removing the
11072 call node before the warning is generated. */
11073
11074 tree
11075 fold_call_stmt (gcall *stmt, bool ignore)
11076 {
11077 tree ret = NULL_TREE;
11078 tree fndecl = gimple_call_fndecl (stmt);
11079 location_t loc = gimple_location (stmt);
11080 if (fndecl
11081 && TREE_CODE (fndecl) == FUNCTION_DECL
11082 && DECL_BUILT_IN (fndecl)
11083 && !gimple_call_va_arg_pack_p (stmt))
11084 {
11085 int nargs = gimple_call_num_args (stmt);
11086 tree *args = (nargs > 0
11087 ? gimple_call_arg_ptr (stmt, 0)
11088 : &error_mark_node);
11089
11090 if (avoid_folding_inline_builtin (fndecl))
11091 return NULL_TREE;
11092 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11093 {
11094 return targetm.fold_builtin (fndecl, nargs, args, ignore);
11095 }
11096 else
11097 {
11098 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
11099 if (ret)
11100 {
11101 /* Propagate location information from original call to
11102 expansion of builtin. Otherwise things like
11103 maybe_emit_chk_warning, that operate on the expansion
11104 of a builtin, will use the wrong location information. */
11105 if (gimple_has_location (stmt))
11106 {
11107 tree realret = ret;
11108 if (TREE_CODE (ret) == NOP_EXPR)
11109 realret = TREE_OPERAND (ret, 0);
11110 if (CAN_HAVE_LOCATION_P (realret)
11111 && !EXPR_HAS_LOCATION (realret))
11112 SET_EXPR_LOCATION (realret, loc);
11113 return realret;
11114 }
11115 return ret;
11116 }
11117 }
11118 }
11119 return NULL_TREE;
11120 }
11121
11122 /* Look up the function in builtin_decl that corresponds to DECL
11123 and set ASMSPEC as its user assembler name. DECL must be a
11124 function decl that declares a builtin. */
11125
11126 void
11127 set_builtin_user_assembler_name (tree decl, const char *asmspec)
11128 {
11129 tree builtin;
11130 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
11131 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
11132 && asmspec != 0);
11133
11134 builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
11135 set_user_assembler_name (builtin, asmspec);
11136 switch (DECL_FUNCTION_CODE (decl))
11137 {
11138 case BUILT_IN_MEMCPY:
11139 init_block_move_fn (asmspec);
11140 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
11141 break;
11142 case BUILT_IN_MEMSET:
11143 init_block_clear_fn (asmspec);
11144 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
11145 break;
11146 case BUILT_IN_MEMMOVE:
11147 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
11148 break;
11149 case BUILT_IN_MEMCMP:
11150 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
11151 break;
11152 case BUILT_IN_ABORT:
11153 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
11154 break;
11155 case BUILT_IN_FFS:
11156 if (INT_TYPE_SIZE < BITS_PER_WORD)
11157 {
11158 set_user_assembler_libfunc ("ffs", asmspec);
11159 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
11160 MODE_INT, 0), "ffs");
11161 }
11162 break;
11163 default:
11164 break;
11165 }
11166 }
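
/* For example (hypothetical user code), a redirection such as

     extern void *memcpy (void *, const void *, __SIZE_TYPE__)
       __asm__ ("my_memcpy");

   arrives here with ASMSPEC == "my_memcpy", so both the block-move
   expanders and the memcpy libfunc are retargeted to that symbol.  */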
11167
11168 /* Return true if DECL is a builtin that expands to a constant or similarly
11169 simple code. */
11170 bool
11171 is_simple_builtin (tree decl)
11172 {
11173 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
11174 switch (DECL_FUNCTION_CODE (decl))
11175 {
11176 /* Builtins that expand to constants. */
11177 case BUILT_IN_CONSTANT_P:
11178 case BUILT_IN_EXPECT:
11179 case BUILT_IN_OBJECT_SIZE:
11180 case BUILT_IN_UNREACHABLE:
11181 /* Simple register moves or loads from stack. */
11182 case BUILT_IN_ASSUME_ALIGNED:
11183 case BUILT_IN_RETURN_ADDRESS:
11184 case BUILT_IN_EXTRACT_RETURN_ADDR:
11185 case BUILT_IN_FROB_RETURN_ADDR:
11186 case BUILT_IN_RETURN:
11187 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
11188 case BUILT_IN_FRAME_ADDRESS:
11189 case BUILT_IN_VA_END:
11190 case BUILT_IN_STACK_SAVE:
11191 case BUILT_IN_STACK_RESTORE:
11192 /* Exception state returns or moves registers around. */
11193 case BUILT_IN_EH_FILTER:
11194 case BUILT_IN_EH_POINTER:
11195 case BUILT_IN_EH_COPY_VALUES:
11196 return true;
11197
11198 default:
11199 return false;
11200 }
11201
11202 return false;
11203 }
11204
11205 /* Return true if DECL is a builtin that is not expensive, i.e., one that
11206 is most probably expanded inline into reasonably simple code. This is a
11207 superset of is_simple_builtin. */
11208 bool
11209 is_inexpensive_builtin (tree decl)
11210 {
11211 if (!decl)
11212 return false;
11213 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
11214 return true;
11215 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
11216 switch (DECL_FUNCTION_CODE (decl))
11217 {
11218 case BUILT_IN_ABS:
11219 case BUILT_IN_ALLOCA:
11220 case BUILT_IN_ALLOCA_WITH_ALIGN:
11221 case BUILT_IN_BSWAP16:
11222 case BUILT_IN_BSWAP32:
11223 case BUILT_IN_BSWAP64:
11224 case BUILT_IN_CLZ:
11225 case BUILT_IN_CLZIMAX:
11226 case BUILT_IN_CLZL:
11227 case BUILT_IN_CLZLL:
11228 case BUILT_IN_CTZ:
11229 case BUILT_IN_CTZIMAX:
11230 case BUILT_IN_CTZL:
11231 case BUILT_IN_CTZLL:
11232 case BUILT_IN_FFS:
11233 case BUILT_IN_FFSIMAX:
11234 case BUILT_IN_FFSL:
11235 case BUILT_IN_FFSLL:
11236 case BUILT_IN_IMAXABS:
11237 case BUILT_IN_FINITE:
11238 case BUILT_IN_FINITEF:
11239 case BUILT_IN_FINITEL:
11240 case BUILT_IN_FINITED32:
11241 case BUILT_IN_FINITED64:
11242 case BUILT_IN_FINITED128:
11243 case BUILT_IN_FPCLASSIFY:
11244 case BUILT_IN_ISFINITE:
11245 case BUILT_IN_ISINF_SIGN:
11246 case BUILT_IN_ISINF:
11247 case BUILT_IN_ISINFF:
11248 case BUILT_IN_ISINFL:
11249 case BUILT_IN_ISINFD32:
11250 case BUILT_IN_ISINFD64:
11251 case BUILT_IN_ISINFD128:
11252 case BUILT_IN_ISNAN:
11253 case BUILT_IN_ISNANF:
11254 case BUILT_IN_ISNANL:
11255 case BUILT_IN_ISNAND32:
11256 case BUILT_IN_ISNAND64:
11257 case BUILT_IN_ISNAND128:
11258 case BUILT_IN_ISNORMAL:
11259 case BUILT_IN_ISGREATER:
11260 case BUILT_IN_ISGREATEREQUAL:
11261 case BUILT_IN_ISLESS:
11262 case BUILT_IN_ISLESSEQUAL:
11263 case BUILT_IN_ISLESSGREATER:
11264 case BUILT_IN_ISUNORDERED:
11265 case BUILT_IN_VA_ARG_PACK:
11266 case BUILT_IN_VA_ARG_PACK_LEN:
11267 case BUILT_IN_VA_COPY:
11268 case BUILT_IN_TRAP:
11269 case BUILT_IN_SAVEREGS:
11270 case BUILT_IN_POPCOUNTL:
11271 case BUILT_IN_POPCOUNTLL:
11272 case BUILT_IN_POPCOUNTIMAX:
11273 case BUILT_IN_POPCOUNT:
11274 case BUILT_IN_PARITYL:
11275 case BUILT_IN_PARITYLL:
11276 case BUILT_IN_PARITYIMAX:
11277 case BUILT_IN_PARITY:
11278 case BUILT_IN_LABS:
11279 case BUILT_IN_LLABS:
11280 case BUILT_IN_PREFETCH:
11281 case BUILT_IN_ACC_ON_DEVICE:
11282 return true;
11283
11284 default:
11285 return is_simple_builtin (decl);
11286 }
11287
11288 return false;
11289 }