/* Expand builtin functions.
   Copyright (C) 1988-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Legacy warning!  Please add no further builtin simplifications here
   (apart from pure constant folding) - builtin simplifications should go
   to match.pd or gimple-fold.c instead.  */
23
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "backend.h"
28 #include "predict.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "rtl.h"
32 #include "alias.h"
33 #include "fold-const.h"
34 #include "stringpool.h"
35 #include "stor-layout.h"
36 #include "calls.h"
37 #include "varasm.h"
38 #include "tree-object-size.h"
39 #include "realmpfr.h"
40 #include "cfgrtl.h"
41 #include "internal-fn.h"
42 #include "flags.h"
43 #include "regs.h"
44 #include "except.h"
45 #include "insn-config.h"
46 #include "expmed.h"
47 #include "dojump.h"
48 #include "explow.h"
49 #include "emit-rtl.h"
50 #include "stmt.h"
51 #include "expr.h"
52 #include "insn-codes.h"
53 #include "optabs.h"
54 #include "libfuncs.h"
55 #include "recog.h"
56 #include "output.h"
57 #include "typeclass.h"
58 #include "tm_p.h"
59 #include "target.h"
60 #include "langhooks.h"
61 #include "tree-ssanames.h"
62 #include "tree-dfa.h"
63 #include "value-prof.h"
64 #include "diagnostic-core.h"
65 #include "builtins.h"
66 #include "asan.h"
67 #include "cilk.h"
68 #include "cgraph.h"
69 #include "tree-chkp.h"
70 #include "rtl-chkp.h"
71
72
static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));

struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN

/* Setup an array of builtin_info_type, make sure each element decl is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;
static rtx c_readstr (const char *, machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static void expand_errno_check (tree, rtx);
static rtx expand_builtin_mathfn (tree, rtx, rtx);
static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
static rtx expand_builtin_memcpy_args (tree, tree, tree, rtx, tree);
static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
					machine_mode, int, tree);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree, bool);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree fold_builtin_nan (tree, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_bitop (tree, tree);
static tree fold_builtin_strchr (location_t, tree, tree, tree);
static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_strcmp (location_t, tree, tree);
static tree fold_builtin_strncmp (location_t, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree,
					enum tree_code, enum tree_code);
static tree fold_builtin_0 (location_t, tree);
static tree fold_builtin_1 (location_t, tree, tree);
static tree fold_builtin_2 (location_t, tree, tree, tree);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree*, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strstr (location_t, tree, tree, tree);
static tree fold_builtin_strrchr (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
			  const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_arg2 (tree, tree, tree,
			  int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_arg3 (tree, tree, tree, tree,
			  int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_sincos (tree, tree, tree);
static tree do_mpfr_bessel_n (tree, tree, tree,
			      int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
			      const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);

/* Return true if NAME starts with __builtin_, __sync_ or __atomic_, or,
   when Cilk Plus is enabled, if it is one of the __cilkrts_ entry points
   recognized below.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  if (flag_cilkplus
      && (!strcmp (name, "__cilkrts_detach")
          || !strcmp (name, "__cilkrts_pop_frame")))
    return true;
  return false;
}
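
/* Editorial example (an illustration, not from the original sources):
   the test above is purely textual, so for instance

     is_builtin_name ("__builtin_memcpy")        -> true
     is_builtin_name ("__sync_fetch_and_add_4")  -> true
     is_builtin_name ("memcpy")                  -> false

   and a user-declared function that happens to use one of these reserved
   prefixes is treated the same way as the real builtin entry points.  */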


/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

static bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}

/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and
   N in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
			     &mode, &unsignedp, &volatilep, true);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
    {
      align = TYPE_ALIGN (TREE_TYPE (exp));
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = ptr_bitmask & -ptr_bitmask;
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, (step & -step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      if (!addr_p && !known_alignment
	  && TYPE_ALIGN (TREE_TYPE (exp)) > align)
	align = TYPE_ALIGN (TREE_TYPE (exp));
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  *alignp = align;
  *bitposp = bitpos & (*alignp - 1);
  return known_alignment;
}

/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Otherwise return false and store BITS_PER_UNIT to *ALIGNP and any
   bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}

/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);
  return align;
}
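
/* Editorial worked example (an illustration, not from the original
   sources): if get_object_alignment_1 determines align == 32 and
   bitpos == 8 for some EXP, then &EXP is known to be 8 bits past a
   32-bit boundary.  The reduction above keeps only the lowest set bit
   of the offset:

     bitpos & -bitpos  ==  8 & -8  ==  8

   so the returned alignment is 8 bits (one byte), the strongest
   power-of-two divisibility guarantee consistent with the known
   misalignment.  */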

/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Return false if the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* We cannot really tell whether this result is an approximation.  */
	  return true;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}

/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);

  return align;
}

/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  loc = EXPR_LOC_OR_LOC (src, input_location);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! tree_fits_shwi_p (offset_node))
    offset = -1;
  else
    offset = tree_to_shwi (offset_node);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (src))
	{
	  warning_at (loc, 0, "offset outside bounds of constant string");
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
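
/* Editorial example (an illustration, not from the original sources):
   for SRC denoting the literal "foobar" (TREE_STRING_LENGTH 7, counting
   the terminating NUL), c_strlen returns ssize_int (6); for "foobar" + 2
   it returns ssize_int (4); and for "foobar" + 10 the offset is out of
   bounds, so the warning above is emitted (unless ONLY_VALUE == 2) and
   NULL_TREE is returned so that strlen is called at runtime instead.  */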

/* Return a char pointer for a C string if it is a string constant
   or sum of string constant and integer constant.  */

const char *
c_getstr (tree src)
{
  tree offset_node;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;

  if (offset_node == 0)
    return TREE_STRING_POINTER (src);
  else if (!tree_fits_uhwi_p (offset_node)
	   || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
    return 0;

  return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
}

/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, machine_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
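
/* Editorial example (an illustration, not from the original sources):
   on a little-endian target with 32-bit SImode,
   c_readstr ("abcd", SImode) packs the bytes starting at bit 0 and
   yields the constant 0x64636261 ('a' in the low byte); on a big-endian
   target the same call yields 0x61626364.  The extra adjustment above
   handles layouts where WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN.  */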

/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
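
/* Editorial example (an illustration, not from the original sources):
   with CHAR_TYPE_SIZE == 8 and an 8-bit host char, the INTEGER_CST 65
   stores 'A' in *P and returns 0; the INTEGER_CST 321 is first masked
   to its low 8 target bits (65) and likewise succeeds.  The call fails
   (returns 1) only for a non-INTEGER_CST, or when the target character
   is wider than the host can represent and the value does not fit.  */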

/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}

/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
  if (tem == NULL_RTX)
    {
      /* For a zero count with __builtin_return_address, we don't care what
	 frame address we return, because target-specific definitions will
	 override us.  Therefore frame pointer elimination is OK, and using
	 the soft frame pointer is OK.

	 For a nonzero count, or a zero count with __builtin_frame_address,
	 we require a stable offset from the current frame pointer to the
	 previous one, so we must use the hard frame pointer, and
	 we must disable frame pointer elimination.  */
      if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
	tem = frame_pointer_rtx;
      else
	{
	  tem = hard_frame_pointer_rtx;

	  /* Tell reload not to eliminate the frame pointer.  */
	  crtl->accesses_prior_frames = 1;
	}
    }

  if (count > 0)
    SETUP_FRAME_ADDRESSES ();

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return FRAME_ADDR_RTX (tem);

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
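
/* Editorial example (an illustration, not from the original sources):
   a source-level call such as __builtin_return_address (2) arrives here
   with COUNT == 2, so the loop above dereferences the dynamic chain
   twice to reach the frame of the caller's caller, from which the saved
   return address slot is then read (via RETURN_ADDR_RTX when the target
   provides it).  Nonzero counts are only reliable on targets whose
   frame layout supports this kind of walk.  */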

/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}
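
/* Editorial sketch of the buffer layout produced above (an illustration,
   not from the original sources; slots in units of GET_MODE_SIZE (Pmode)):

     buf[0]    frame pointer value (targetm.builtin_setjmp_frame_value)
     buf[1]    address of RECEIVER_LABEL
     buf[2..]  machine-dependent stack save area (sa_mode)

   expand_builtin_longjmp below reads the words back in the same order.  */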

/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
  if (! targetm.have_nonlocal_goto ())
    {
      /* First adjust our frame pointer to its actual value.  It was
	 previously set to the start of the virtual area corresponding to
	 the stacked variables when we branched here and now needs to be
	 adjusted to the actual hardware fp value.

	 Assignments to virtual registers are converted by
	 instantiate_virtual_regs into the corresponding assignment
	 to the underlying register (fp in this case) that makes
	 the original assignment true.
	 So the following insn will actually be decrementing fp by
	 STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
	 Mark it used (so that the previous assignment remains live once
	 the frame pointer is eliminated) and clobbered (to represent the
	 implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

  if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }

  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());
  else
    { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}

/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
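
/* Editorial usage note (an illustration, not from the original sources):
   the builtin pair is intended for internal EH use, roughly

     if (__builtin_setjmp (buf) == 0)
       ... normal path ...
     else
       ... resumed here after __builtin_longjmp (buf, 1) ...

   where the second argument to __builtin_longjmp must be the constant 1,
   as enforced by the gcc_assert above.  */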

/* Return true if more arguments remain to be visited by the const call
   expression argument iterator ITER.  */

static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = false;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  do
    {
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  return res;
}
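
/* Editorial example (an illustration, not from the original sources):
   a typical use is

     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE,
		       VOID_TYPE)

   which accepts exactly (void *, void *, size_t)-shaped calls such as
   memcpy, while

     validate_arglist (exp, POINTER_TYPE, 0)

   accepts a pointer followed by any further arguments, the trailing 0
   playing the role of an ellipsis.  */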

/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}

/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}

/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

  if (targetm.have_prefetch ())
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
	return;
    }

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
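
/* Editorial example (an illustration, not from the original sources):
   a source-level call

     __builtin_prefetch (p, 1, 3);

   arrives here with ARG0 == p, ARG1 == 1 (prefetch for write) and
   ARG2 == 3 (maximum temporal locality); both flag arguments must be
   compile-time INTEGER_CSTs, otherwise the diagnostics above fire and
   zero is substituted.  */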

/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}

/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = VOIDmode;
	  }
    }
  return size;
}
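
/* Editorial sketch of the block layout sized above (an illustration,
   not from the original sources):

     [0]        incoming arg pointer (Pmode)
     [+Pmode]   structure value address, if not passed invisibly
     [...]      one slot per argument register, each aligned to the
		GET_MODE_ALIGNMENT of the mode chosen for it

   apply_args_mode[] records, per hard register, the mode stored in the
   corresponding slot (VOIDmode for registers never used for arguments).  */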

/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}

/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (mem, reg)
			    : gen_rtx_SET (reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}

/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  machine_mode mode;
  rtx struct_incoming_value
    = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  if (STACK_GROWS_DOWNWARD)
    tem
      = force_operand (plus_constant (Pmode, tem,
				      crtl->args.pretend_args_size),
		       NULL_RTX);
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}

/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    rtx_insn *seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
1541
1542 /* Perform an untyped call and save the state required to perform an
1543 untyped return of whatever value was returned by the given function. */
1544
1545 static rtx
1546 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1547 {
1548 int size, align, regno;
1549 machine_mode mode;
1550 rtx incoming_args, result, reg, dest, src;
1551 rtx_call_insn *call_insn;
1552 rtx old_stack_level = 0;
1553 rtx call_fusage = 0;
1554 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1555
1556 arguments = convert_memory_address (Pmode, arguments);
1557
1558 /* Create a block where the return registers can be saved. */
1559 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1560
1561 /* Fetch the arg pointer from the ARGUMENTS block. */
1562 incoming_args = gen_reg_rtx (Pmode);
1563 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1564 if (!STACK_GROWS_DOWNWARD)
1565 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1566 incoming_args, 0, OPTAB_LIB_WIDEN);
1567
1568 /* Push a new argument block and copy the arguments. Do not allow
1569 the (potential) memcpy call below to interfere with our stack
1570 manipulations. */
1571 do_pending_stack_adjust ();
1572 NO_DEFER_POP;
1573
1574 /* Save the stack with nonlocal if available. */
1575 if (targetm.have_save_stack_nonlocal ())
1576 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1577 else
1578 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1579
1580 /* Allocate a block of memory onto the stack and copy the memory
1581 arguments to the outgoing arguments address. We can pass TRUE
1582 as the 4th argument because we just saved the stack pointer
1583 and will restore it right after the call. */
1584 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1585
1586 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1587 may have already set current_function_calls_alloca to true.
1588 current_function_calls_alloca won't be set if argsize is zero,
1589 so we have to guarantee need_drap is true here. */
1590 if (SUPPORTS_STACK_ALIGNMENT)
1591 crtl->need_drap = true;
1592
1593 dest = virtual_outgoing_args_rtx;
1594 if (!STACK_GROWS_DOWNWARD)
1595 {
1596 if (CONST_INT_P (argsize))
1597 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1598 else
1599 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1600 }
1601 dest = gen_rtx_MEM (BLKmode, dest);
1602 set_mem_align (dest, PARM_BOUNDARY);
1603 src = gen_rtx_MEM (BLKmode, incoming_args);
1604 set_mem_align (src, PARM_BOUNDARY);
1605 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1606
1607 /* Refer to the argument block. */
1608 apply_args_size ();
1609 arguments = gen_rtx_MEM (BLKmode, arguments);
1610 set_mem_align (arguments, PARM_BOUNDARY);
1611
1612 /* Walk past the arg-pointer and structure value address. */
1613 size = GET_MODE_SIZE (Pmode);
1614 if (struct_value)
1615 size += GET_MODE_SIZE (Pmode);
1616
1617 /* Restore each of the registers previously saved. Make USE insns
1618 for each of these registers for use in making the call. */
1619 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1620 if ((mode = apply_args_mode[regno]) != VOIDmode)
1621 {
1622 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1623 if (size % align != 0)
1624 size = CEIL (size, align) * align;
1625 reg = gen_rtx_REG (mode, regno);
1626 emit_move_insn (reg, adjust_address (arguments, mode, size));
1627 use_reg (&call_fusage, reg);
1628 size += GET_MODE_SIZE (mode);
1629 }
1630
1631 /* Restore the structure value address unless this is passed as an
1632 "invisible" first argument. */
1633 size = GET_MODE_SIZE (Pmode);
1634 if (struct_value)
1635 {
1636 rtx value = gen_reg_rtx (Pmode);
1637 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1638 emit_move_insn (struct_value, value);
1639 if (REG_P (struct_value))
1640 use_reg (&call_fusage, struct_value);
1641 size += GET_MODE_SIZE (Pmode);
1642 }
1643
1644 /* All arguments and registers used for the call are set up by now! */
1645 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1646
1647   /* Ensure the address is valid.  A SYMBOL_REF is already valid, so we
1648      need not load it into a register as an optimization, because
1649      prepare_call_address already did that if it should be done.  */
1650 if (GET_CODE (function) != SYMBOL_REF)
1651 function = memory_address (FUNCTION_MODE, function);
1652
1653 /* Generate the actual call instruction and save the return value. */
1654 if (targetm.have_untyped_call ())
1655 {
1656 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1657 emit_call_insn (targetm.gen_untyped_call (mem, result,
1658 result_vector (1, result)));
1659 }
1660 else if (targetm.have_call_value ())
1661 {
1662 rtx valreg = 0;
1663
1664 /* Locate the unique return register. It is not possible to
1665 express a call that sets more than one return register using
1666 call_value; use untyped_call for that. In fact, untyped_call
1667 only needs to save the return registers in the given block. */
1668 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1669 if ((mode = apply_result_mode[regno]) != VOIDmode)
1670 {
1671 gcc_assert (!valreg); /* have_untyped_call required. */
1672
1673 valreg = gen_rtx_REG (mode, regno);
1674 }
1675
1676 emit_insn (targetm.gen_call_value (valreg,
1677 gen_rtx_MEM (FUNCTION_MODE, function),
1678 const0_rtx, NULL_RTX, const0_rtx));
1679
1680 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1681 }
1682 else
1683 gcc_unreachable ();
1684
1685 /* Find the CALL insn we just emitted, and attach the register usage
1686 information. */
1687 call_insn = last_call_insn ();
1688 add_function_usage_to (call_insn, call_fusage);
1689
1690 /* Restore the stack. */
1691 if (targetm.have_save_stack_nonlocal ())
1692 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1693 else
1694 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1695 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1696
1697 OK_DEFER_POP;
1698
1699 /* Return the address of the result block. */
1700 result = copy_addr_to_reg (XEXP (result, 0));
1701 return convert_memory_address (ptr_mode, result);
1702 }
1703
1704 /* Perform an untyped return. */
1705
1706 static void
1707 expand_builtin_return (rtx result)
1708 {
1709 int size, align, regno;
1710 machine_mode mode;
1711 rtx reg;
1712 rtx_insn *call_fusage = 0;
1713
1714 result = convert_memory_address (Pmode, result);
1715
1716 apply_result_size ();
1717 result = gen_rtx_MEM (BLKmode, result);
1718
1719 if (targetm.have_untyped_return ())
1720 {
1721 rtx vector = result_vector (0, result);
1722 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1723 emit_barrier ();
1724 return;
1725 }
1726
1727 /* Restore the return value and note that each value is used. */
1728 size = 0;
1729 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1730 if ((mode = apply_result_mode[regno]) != VOIDmode)
1731 {
1732 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1733 if (size % align != 0)
1734 size = CEIL (size, align) * align;
1735 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1736 emit_move_insn (reg, adjust_address (result, mode, size));
1737
1738 push_to_sequence (call_fusage);
1739 emit_use (reg);
1740 call_fusage = get_insns ();
1741 end_sequence ();
1742 size += GET_MODE_SIZE (mode);
1743 }
1744
1745 /* Put the USE insns before the return. */
1746 emit_insn (call_fusage);
1747
1748   /* Return whatever value was restored by jumping directly to the end
1749 of the function. */
1750 expand_naked_return ();
1751 }
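
/* A hedged usage sketch (illustrative only, not part of this file):
   the untyped call/return expanders above implement the GNU
   forwarding idiom

     void *args = __builtin_apply_args ();
     void *ret = __builtin_apply ((void (*)()) target_fn, args, 128);
     __builtin_return (ret);

   where target_fn is a hypothetical callee and 128 is a caller-chosen
   upper bound on the size of the pushed argument block.  */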
1752
1753 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1754
1755 static enum type_class
1756 type_to_class (tree type)
1757 {
1758 switch (TREE_CODE (type))
1759 {
1760 case VOID_TYPE: return void_type_class;
1761 case INTEGER_TYPE: return integer_type_class;
1762 case ENUMERAL_TYPE: return enumeral_type_class;
1763 case BOOLEAN_TYPE: return boolean_type_class;
1764 case POINTER_TYPE: return pointer_type_class;
1765 case REFERENCE_TYPE: return reference_type_class;
1766 case OFFSET_TYPE: return offset_type_class;
1767 case REAL_TYPE: return real_type_class;
1768 case COMPLEX_TYPE: return complex_type_class;
1769 case FUNCTION_TYPE: return function_type_class;
1770 case METHOD_TYPE: return method_type_class;
1771 case RECORD_TYPE: return record_type_class;
1772 case UNION_TYPE:
1773 case QUAL_UNION_TYPE: return union_type_class;
1774 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1775 ? string_type_class : array_type_class);
1776 case LANG_TYPE: return lang_type_class;
1777 default: return no_type_class;
1778 }
1779 }
1780
1781 /* Expand a call EXP to __builtin_classify_type. */
1782
1783 static rtx
1784 expand_builtin_classify_type (tree exp)
1785 {
1786 if (call_expr_nargs (exp))
1787 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1788 return GEN_INT (no_type_class);
1789 }
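
/* For example, a call such as __builtin_classify_type (1.0) expands to
   the integer constant real_type_class via the mapping above, while a
   zero-argument call yields no_type_class.  (Illustrative only.)  */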
1790
1791 /* This helper macro, meant to be used in mathfn_built_in below,
1792 determines which among a set of three builtin math functions is
1793 appropriate for a given type mode. The `F' and `L' cases are
1794 automatically generated from the `double' case. */
1795 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1796 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1797 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1798 fcodel = BUILT_IN_MATHFN##L ; break;
1799 /* Similar to above, but appends _R after any F/L suffix. */
1800 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1801 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1802 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1803 fcodel = BUILT_IN_MATHFN##L_R ; break;
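
/* For instance, CASE_MATHFN (BUILT_IN_SQRT) expands, by token pasting,
   roughly to:

     case BUILT_IN_SQRT: case BUILT_IN_SQRTF: case BUILT_IN_SQRTL:
       fcode = BUILT_IN_SQRT; fcodef = BUILT_IN_SQRTF;
       fcodel = BUILT_IN_SQRTL; break;  */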
1804
1805 /* Return the mathematical function equivalent to FN but operating directly
1806    on TYPE, if available.  If IMPLICIT_P is true use the implicit builtin declaration,
1807 otherwise use the explicit declaration. If we can't do the conversion,
1808 return zero. */
1809
1810 static tree
1811 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
1812 {
1813 enum built_in_function fcode, fcodef, fcodel, fcode2;
1814
1815 switch (fn)
1816 {
1817 CASE_MATHFN (BUILT_IN_ACOS)
1818 CASE_MATHFN (BUILT_IN_ACOSH)
1819 CASE_MATHFN (BUILT_IN_ASIN)
1820 CASE_MATHFN (BUILT_IN_ASINH)
1821 CASE_MATHFN (BUILT_IN_ATAN)
1822 CASE_MATHFN (BUILT_IN_ATAN2)
1823 CASE_MATHFN (BUILT_IN_ATANH)
1824 CASE_MATHFN (BUILT_IN_CBRT)
1825 CASE_MATHFN (BUILT_IN_CEIL)
1826 CASE_MATHFN (BUILT_IN_CEXPI)
1827 CASE_MATHFN (BUILT_IN_COPYSIGN)
1828 CASE_MATHFN (BUILT_IN_COS)
1829 CASE_MATHFN (BUILT_IN_COSH)
1830 CASE_MATHFN (BUILT_IN_DREM)
1831 CASE_MATHFN (BUILT_IN_ERF)
1832 CASE_MATHFN (BUILT_IN_ERFC)
1833 CASE_MATHFN (BUILT_IN_EXP)
1834 CASE_MATHFN (BUILT_IN_EXP10)
1835 CASE_MATHFN (BUILT_IN_EXP2)
1836 CASE_MATHFN (BUILT_IN_EXPM1)
1837 CASE_MATHFN (BUILT_IN_FABS)
1838 CASE_MATHFN (BUILT_IN_FDIM)
1839 CASE_MATHFN (BUILT_IN_FLOOR)
1840 CASE_MATHFN (BUILT_IN_FMA)
1841 CASE_MATHFN (BUILT_IN_FMAX)
1842 CASE_MATHFN (BUILT_IN_FMIN)
1843 CASE_MATHFN (BUILT_IN_FMOD)
1844 CASE_MATHFN (BUILT_IN_FREXP)
1845 CASE_MATHFN (BUILT_IN_GAMMA)
1846 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1847 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1848 CASE_MATHFN (BUILT_IN_HYPOT)
1849 CASE_MATHFN (BUILT_IN_ILOGB)
1850 CASE_MATHFN (BUILT_IN_ICEIL)
1851 CASE_MATHFN (BUILT_IN_IFLOOR)
1852 CASE_MATHFN (BUILT_IN_INF)
1853 CASE_MATHFN (BUILT_IN_IRINT)
1854 CASE_MATHFN (BUILT_IN_IROUND)
1855 CASE_MATHFN (BUILT_IN_ISINF)
1856 CASE_MATHFN (BUILT_IN_J0)
1857 CASE_MATHFN (BUILT_IN_J1)
1858 CASE_MATHFN (BUILT_IN_JN)
1859 CASE_MATHFN (BUILT_IN_LCEIL)
1860 CASE_MATHFN (BUILT_IN_LDEXP)
1861 CASE_MATHFN (BUILT_IN_LFLOOR)
1862 CASE_MATHFN (BUILT_IN_LGAMMA)
1863 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1864 CASE_MATHFN (BUILT_IN_LLCEIL)
1865 CASE_MATHFN (BUILT_IN_LLFLOOR)
1866 CASE_MATHFN (BUILT_IN_LLRINT)
1867 CASE_MATHFN (BUILT_IN_LLROUND)
1868 CASE_MATHFN (BUILT_IN_LOG)
1869 CASE_MATHFN (BUILT_IN_LOG10)
1870 CASE_MATHFN (BUILT_IN_LOG1P)
1871 CASE_MATHFN (BUILT_IN_LOG2)
1872 CASE_MATHFN (BUILT_IN_LOGB)
1873 CASE_MATHFN (BUILT_IN_LRINT)
1874 CASE_MATHFN (BUILT_IN_LROUND)
1875 CASE_MATHFN (BUILT_IN_MODF)
1876 CASE_MATHFN (BUILT_IN_NAN)
1877 CASE_MATHFN (BUILT_IN_NANS)
1878 CASE_MATHFN (BUILT_IN_NEARBYINT)
1879 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1880 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1881 CASE_MATHFN (BUILT_IN_POW)
1882 CASE_MATHFN (BUILT_IN_POWI)
1883 CASE_MATHFN (BUILT_IN_POW10)
1884 CASE_MATHFN (BUILT_IN_REMAINDER)
1885 CASE_MATHFN (BUILT_IN_REMQUO)
1886 CASE_MATHFN (BUILT_IN_RINT)
1887 CASE_MATHFN (BUILT_IN_ROUND)
1888 CASE_MATHFN (BUILT_IN_SCALB)
1889 CASE_MATHFN (BUILT_IN_SCALBLN)
1890 CASE_MATHFN (BUILT_IN_SCALBN)
1891 CASE_MATHFN (BUILT_IN_SIGNBIT)
1892 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1893 CASE_MATHFN (BUILT_IN_SIN)
1894 CASE_MATHFN (BUILT_IN_SINCOS)
1895 CASE_MATHFN (BUILT_IN_SINH)
1896 CASE_MATHFN (BUILT_IN_SQRT)
1897 CASE_MATHFN (BUILT_IN_TAN)
1898 CASE_MATHFN (BUILT_IN_TANH)
1899 CASE_MATHFN (BUILT_IN_TGAMMA)
1900 CASE_MATHFN (BUILT_IN_TRUNC)
1901 CASE_MATHFN (BUILT_IN_Y0)
1902 CASE_MATHFN (BUILT_IN_Y1)
1903 CASE_MATHFN (BUILT_IN_YN)
1904
1905 default:
1906 return NULL_TREE;
1907 }
1908
1909 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1910 fcode2 = fcode;
1911 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1912 fcode2 = fcodef;
1913 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1914 fcode2 = fcodel;
1915 else
1916 return NULL_TREE;
1917
1918 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1919 return NULL_TREE;
1920
1921 return builtin_decl_explicit (fcode2);
1922 }
1923
1924 /* Like mathfn_built_in_1(), but always use the implicit builtin declarations.  */
1925
1926 tree
1927 mathfn_built_in (tree type, enum built_in_function fn)
1928 {
1929 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1930 }
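
/* E.g. mathfn_built_in (float_type_node, BUILT_IN_SQRT) returns the
   declaration of sqrtf, provided its implicit declaration is available.  */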
1931
1932 /* If errno must be maintained, expand the RTL to check if the result,
1933 TARGET, of a built-in function call, EXP, is NaN, and if so set
1934 errno to EDOM. */
1935
1936 static void
1937 expand_errno_check (tree exp, rtx target)
1938 {
1939 rtx_code_label *lab = gen_label_rtx ();
1940
1941 /* Test the result; if it is NaN, set errno=EDOM because
1942 the argument was not in the domain. */
1943 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1944 NULL_RTX, NULL, lab,
1945 /* The jump is very likely. */
1946 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
1947
1948 #ifdef TARGET_EDOM
1949 /* If this built-in doesn't throw an exception, set errno directly. */
1950 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1951 {
1952 #ifdef GEN_ERRNO_RTX
1953 rtx errno_rtx = GEN_ERRNO_RTX;
1954 #else
1955 rtx errno_rtx
1956 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1957 #endif
1958 emit_move_insn (errno_rtx,
1959 gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
1960 emit_label (lab);
1961 return;
1962 }
1963 #endif
1964
1965 /* Make sure the library call isn't expanded as a tail call. */
1966 CALL_EXPR_TAILCALL (exp) = 0;
1967
1968 /* We can't set errno=EDOM directly; let the library call do it.
1969 Pop the arguments right away in case the call gets deleted. */
1970 NO_DEFER_POP;
1971 expand_call (exp, target, 0);
1972 OK_DEFER_POP;
1973 emit_label (lab);
1974 }
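
/* The self-comparison above relies on the IEEE rule that a NaN compares
   unequal to itself.  Conceptually the emitted code behaves like this
   hedged C sketch, where a NaN result means the argument was outside
   the function's domain:

     double r = some_math_fn (x);
     if (!(r == r))
       errno = EDOM;
*/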
1975
1976 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1977 Return NULL_RTX if a normal call should be emitted rather than expanding
1978 the function in-line. EXP is the expression that is a call to the builtin
1979 function; if convenient, the result should be placed in TARGET.
1980 SUBTARGET may be used as the target for computing one of EXP's operands. */
1981
1982 static rtx
1983 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1984 {
1985 optab builtin_optab;
1986 rtx op0;
1987 rtx_insn *insns;
1988 tree fndecl = get_callee_fndecl (exp);
1989 machine_mode mode;
1990 bool errno_set = false;
1991 bool try_widening = false;
1992 tree arg;
1993
1994 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1995 return NULL_RTX;
1996
1997 arg = CALL_EXPR_ARG (exp, 0);
1998
1999 switch (DECL_FUNCTION_CODE (fndecl))
2000 {
2001 CASE_FLT_FN (BUILT_IN_SQRT):
2002 errno_set = ! tree_expr_nonnegative_p (arg);
2003 try_widening = true;
2004 builtin_optab = sqrt_optab;
2005 break;
2006 CASE_FLT_FN (BUILT_IN_EXP):
2007 errno_set = true; builtin_optab = exp_optab; break;
2008 CASE_FLT_FN (BUILT_IN_EXP10):
2009 CASE_FLT_FN (BUILT_IN_POW10):
2010 errno_set = true; builtin_optab = exp10_optab; break;
2011 CASE_FLT_FN (BUILT_IN_EXP2):
2012 errno_set = true; builtin_optab = exp2_optab; break;
2013 CASE_FLT_FN (BUILT_IN_EXPM1):
2014 errno_set = true; builtin_optab = expm1_optab; break;
2015 CASE_FLT_FN (BUILT_IN_LOGB):
2016 errno_set = true; builtin_optab = logb_optab; break;
2017 CASE_FLT_FN (BUILT_IN_LOG):
2018 errno_set = true; builtin_optab = log_optab; break;
2019 CASE_FLT_FN (BUILT_IN_LOG10):
2020 errno_set = true; builtin_optab = log10_optab; break;
2021 CASE_FLT_FN (BUILT_IN_LOG2):
2022 errno_set = true; builtin_optab = log2_optab; break;
2023 CASE_FLT_FN (BUILT_IN_LOG1P):
2024 errno_set = true; builtin_optab = log1p_optab; break;
2025 CASE_FLT_FN (BUILT_IN_ASIN):
2026 builtin_optab = asin_optab; break;
2027 CASE_FLT_FN (BUILT_IN_ACOS):
2028 builtin_optab = acos_optab; break;
2029 CASE_FLT_FN (BUILT_IN_TAN):
2030 builtin_optab = tan_optab; break;
2031 CASE_FLT_FN (BUILT_IN_ATAN):
2032 builtin_optab = atan_optab; break;
2033 CASE_FLT_FN (BUILT_IN_FLOOR):
2034 builtin_optab = floor_optab; break;
2035 CASE_FLT_FN (BUILT_IN_CEIL):
2036 builtin_optab = ceil_optab; break;
2037 CASE_FLT_FN (BUILT_IN_TRUNC):
2038 builtin_optab = btrunc_optab; break;
2039 CASE_FLT_FN (BUILT_IN_ROUND):
2040 builtin_optab = round_optab; break;
2041 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2042 builtin_optab = nearbyint_optab;
2043 if (flag_trapping_math)
2044 break;
2045       /* Else fall through and expand as rint.  */
2046 CASE_FLT_FN (BUILT_IN_RINT):
2047 builtin_optab = rint_optab; break;
2048 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2049 builtin_optab = significand_optab; break;
2050 default:
2051 gcc_unreachable ();
2052 }
2053
2054 /* Make a suitable register to place result in. */
2055 mode = TYPE_MODE (TREE_TYPE (exp));
2056
2057 if (! flag_errno_math || ! HONOR_NANS (mode))
2058 errno_set = false;
2059
2060 /* Before working hard, check whether the instruction is available, but try
2061 to widen the mode for specific operations. */
2062 if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2063 || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
2064 && (!errno_set || !optimize_insn_for_size_p ()))
2065 {
2066 rtx result = gen_reg_rtx (mode);
2067
2068 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2069 need to expand the argument again. This way, we will not perform
2070 	 side-effects more than once.  */
2071 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2072
2073 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2074
2075 start_sequence ();
2076
2077 /* Compute into RESULT.
2078 Set RESULT to wherever the result comes back. */
2079 result = expand_unop (mode, builtin_optab, op0, result, 0);
2080
2081 if (result != 0)
2082 {
2083 if (errno_set)
2084 expand_errno_check (exp, result);
2085
2086 /* Output the entire sequence. */
2087 insns = get_insns ();
2088 end_sequence ();
2089 emit_insn (insns);
2090 return result;
2091 }
2092
2093 /* If we were unable to expand via the builtin, stop the sequence
2094 (without outputting the insns) and call to the library function
2095 with the stabilized argument list. */
2096 end_sequence ();
2097 }
2098
2099 return expand_call (exp, target, target == const0_rtx);
2100 }
2101
2102 /* Expand a call to the builtin binary math functions (pow and atan2).
2103 Return NULL_RTX if a normal call should be emitted rather than expanding the
2104 function in-line. EXP is the expression that is a call to the builtin
2105 function; if convenient, the result should be placed in TARGET.
2106 SUBTARGET may be used as the target for computing one of EXP's
2107 operands. */
2108
2109 static rtx
2110 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2111 {
2112 optab builtin_optab;
2113 rtx op0, op1, result;
2114 rtx_insn *insns;
2115 int op1_type = REAL_TYPE;
2116 tree fndecl = get_callee_fndecl (exp);
2117 tree arg0, arg1;
2118 machine_mode mode;
2119 bool errno_set = true;
2120
2121 switch (DECL_FUNCTION_CODE (fndecl))
2122 {
2123 CASE_FLT_FN (BUILT_IN_SCALBN):
2124 CASE_FLT_FN (BUILT_IN_SCALBLN):
2125 CASE_FLT_FN (BUILT_IN_LDEXP):
2126 op1_type = INTEGER_TYPE;
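      /* FALLTHRU */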
2127 default:
2128 break;
2129 }
2130
2131 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2132 return NULL_RTX;
2133
2134 arg0 = CALL_EXPR_ARG (exp, 0);
2135 arg1 = CALL_EXPR_ARG (exp, 1);
2136
2137 switch (DECL_FUNCTION_CODE (fndecl))
2138 {
2139 CASE_FLT_FN (BUILT_IN_POW):
2140 builtin_optab = pow_optab; break;
2141 CASE_FLT_FN (BUILT_IN_ATAN2):
2142 builtin_optab = atan2_optab; break;
2143 CASE_FLT_FN (BUILT_IN_SCALB):
2144 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2145 return 0;
2146 builtin_optab = scalb_optab; break;
2147 CASE_FLT_FN (BUILT_IN_SCALBN):
2148 CASE_FLT_FN (BUILT_IN_SCALBLN):
2149 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2150 return 0;
2151 /* Fall through... */
2152 CASE_FLT_FN (BUILT_IN_LDEXP):
2153 builtin_optab = ldexp_optab; break;
2154 CASE_FLT_FN (BUILT_IN_FMOD):
2155 builtin_optab = fmod_optab; break;
2156 CASE_FLT_FN (BUILT_IN_REMAINDER):
2157 CASE_FLT_FN (BUILT_IN_DREM):
2158 builtin_optab = remainder_optab; break;
2159 default:
2160 gcc_unreachable ();
2161 }
2162
2163 /* Make a suitable register to place result in. */
2164 mode = TYPE_MODE (TREE_TYPE (exp));
2165
2166 /* Before working hard, check whether the instruction is available. */
2167 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2168 return NULL_RTX;
2169
2170 result = gen_reg_rtx (mode);
2171
2172 if (! flag_errno_math || ! HONOR_NANS (mode))
2173 errno_set = false;
2174
2175 if (errno_set && optimize_insn_for_size_p ())
2176 return 0;
2177
2178 /* Always stabilize the argument list. */
2179 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2180 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2181
2182 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2183 op1 = expand_normal (arg1);
2184
2185 start_sequence ();
2186
2187 /* Compute into RESULT.
2188 Set RESULT to wherever the result comes back. */
2189 result = expand_binop (mode, builtin_optab, op0, op1,
2190 result, 0, OPTAB_DIRECT);
2191
2192 /* If we were unable to expand via the builtin, stop the sequence
2193 (without outputting the insns) and call to the library function
2194 with the stabilized argument list. */
2195 if (result == 0)
2196 {
2197 end_sequence ();
2198 return expand_call (exp, target, target == const0_rtx);
2199 }
2200
2201 if (errno_set)
2202 expand_errno_check (exp, result);
2203
2204 /* Output the entire sequence. */
2205 insns = get_insns ();
2206 end_sequence ();
2207 emit_insn (insns);
2208
2209 return result;
2210 }
2211
2212 /* Expand a call to the builtin ternary math functions (fma).
2213 Return NULL_RTX if a normal call should be emitted rather than expanding the
2214 function in-line. EXP is the expression that is a call to the builtin
2215 function; if convenient, the result should be placed in TARGET.
2216 SUBTARGET may be used as the target for computing one of EXP's
2217 operands. */
2218
2219 static rtx
2220 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2221 {
2222 optab builtin_optab;
2223 rtx op0, op1, op2, result;
2224 rtx_insn *insns;
2225 tree fndecl = get_callee_fndecl (exp);
2226 tree arg0, arg1, arg2;
2227 machine_mode mode;
2228
2229 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2230 return NULL_RTX;
2231
2232 arg0 = CALL_EXPR_ARG (exp, 0);
2233 arg1 = CALL_EXPR_ARG (exp, 1);
2234 arg2 = CALL_EXPR_ARG (exp, 2);
2235
2236 switch (DECL_FUNCTION_CODE (fndecl))
2237 {
2238 CASE_FLT_FN (BUILT_IN_FMA):
2239 builtin_optab = fma_optab; break;
2240 default:
2241 gcc_unreachable ();
2242 }
2243
2244 /* Make a suitable register to place result in. */
2245 mode = TYPE_MODE (TREE_TYPE (exp));
2246
2247 /* Before working hard, check whether the instruction is available. */
2248 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2249 return NULL_RTX;
2250
2251 result = gen_reg_rtx (mode);
2252
2253 /* Always stabilize the argument list. */
2254 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2255 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2256 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2257
2258 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2259 op1 = expand_normal (arg1);
2260 op2 = expand_normal (arg2);
2261
2262 start_sequence ();
2263
2264 /* Compute into RESULT.
2265 Set RESULT to wherever the result comes back. */
2266 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2267 result, 0);
2268
2269 /* If we were unable to expand via the builtin, stop the sequence
2270 (without outputting the insns) and call to the library function
2271 with the stabilized argument list. */
2272 if (result == 0)
2273 {
2274 end_sequence ();
2275 return expand_call (exp, target, target == const0_rtx);
2276 }
2277
2278 /* Output the entire sequence. */
2279 insns = get_insns ();
2280 end_sequence ();
2281 emit_insn (insns);
2282
2283 return result;
2284 }
2285
2286 /* Expand a call to the builtin sin and cos math functions.
2287 Return NULL_RTX if a normal call should be emitted rather than expanding the
2288 function in-line. EXP is the expression that is a call to the builtin
2289 function; if convenient, the result should be placed in TARGET.
2290 SUBTARGET may be used as the target for computing one of EXP's
2291 operands. */
2292
2293 static rtx
2294 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2295 {
2296 optab builtin_optab;
2297 rtx op0;
2298 rtx_insn *insns;
2299 tree fndecl = get_callee_fndecl (exp);
2300 machine_mode mode;
2301 tree arg;
2302
2303 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2304 return NULL_RTX;
2305
2306 arg = CALL_EXPR_ARG (exp, 0);
2307
2308 switch (DECL_FUNCTION_CODE (fndecl))
2309 {
2310 CASE_FLT_FN (BUILT_IN_SIN):
2311 CASE_FLT_FN (BUILT_IN_COS):
2312 builtin_optab = sincos_optab; break;
2313 default:
2314 gcc_unreachable ();
2315 }
2316
2317 /* Make a suitable register to place result in. */
2318 mode = TYPE_MODE (TREE_TYPE (exp));
2319
2320   /* Check if the sincos insn is available; otherwise fall back
2321      to the sin or cos insn.  */
2322 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2323 switch (DECL_FUNCTION_CODE (fndecl))
2324 {
2325 CASE_FLT_FN (BUILT_IN_SIN):
2326 builtin_optab = sin_optab; break;
2327 CASE_FLT_FN (BUILT_IN_COS):
2328 builtin_optab = cos_optab; break;
2329 default:
2330 gcc_unreachable ();
2331 }
2332
2333 /* Before working hard, check whether the instruction is available. */
2334 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2335 {
2336 rtx result = gen_reg_rtx (mode);
2337
2338 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2339 need to expand the argument again. This way, we will not perform
2340 	 side-effects more than once.  */
2341 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2342
2343 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2344
2345 start_sequence ();
2346
2347 /* Compute into RESULT.
2348 Set RESULT to wherever the result comes back. */
2349 if (builtin_optab == sincos_optab)
2350 {
2351 int ok;
2352
2353 switch (DECL_FUNCTION_CODE (fndecl))
2354 {
2355 CASE_FLT_FN (BUILT_IN_SIN):
2356 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2357 break;
2358 CASE_FLT_FN (BUILT_IN_COS):
2359 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2360 break;
2361 default:
2362 gcc_unreachable ();
2363 }
2364 gcc_assert (ok);
2365 }
2366 else
2367 result = expand_unop (mode, builtin_optab, op0, result, 0);
2368
2369 if (result != 0)
2370 {
2371 /* Output the entire sequence. */
2372 insns = get_insns ();
2373 end_sequence ();
2374 emit_insn (insns);
2375 return result;
2376 }
2377
2378 /* If we were unable to expand via the builtin, stop the sequence
2379 (without outputting the insns) and call to the library function
2380 with the stabilized argument list. */
2381 end_sequence ();
2382 }
2383
2384 return expand_call (exp, target, target == const0_rtx);
2385 }
2386
2387 /* Given an interclass math builtin decl FNDECL and its argument ARG
2388 return an RTL instruction code that implements the functionality.
2389 If that isn't possible or available return CODE_FOR_nothing. */
2390
2391 static enum insn_code
2392 interclass_mathfn_icode (tree arg, tree fndecl)
2393 {
2394 bool errno_set = false;
2395 optab builtin_optab = unknown_optab;
2396 machine_mode mode;
2397
2398 switch (DECL_FUNCTION_CODE (fndecl))
2399 {
2400 CASE_FLT_FN (BUILT_IN_ILOGB):
2401 errno_set = true; builtin_optab = ilogb_optab; break;
2402 CASE_FLT_FN (BUILT_IN_ISINF):
2403 builtin_optab = isinf_optab; break;
2404 case BUILT_IN_ISNORMAL:
2405 case BUILT_IN_ISFINITE:
2406 CASE_FLT_FN (BUILT_IN_FINITE):
2407 case BUILT_IN_FINITED32:
2408 case BUILT_IN_FINITED64:
2409 case BUILT_IN_FINITED128:
2410 case BUILT_IN_ISINFD32:
2411 case BUILT_IN_ISINFD64:
2412 case BUILT_IN_ISINFD128:
2413 /* These builtins have no optabs (yet). */
2414 break;
2415 default:
2416 gcc_unreachable ();
2417 }
2418
2419 /* There's no easy way to detect the case we need to set EDOM. */
2420 if (flag_errno_math && errno_set)
2421 return CODE_FOR_nothing;
2422
2423 /* Optab mode depends on the mode of the input argument. */
2424 mode = TYPE_MODE (TREE_TYPE (arg));
2425
2426 if (builtin_optab)
2427 return optab_handler (builtin_optab, mode);
2428 return CODE_FOR_nothing;
2429 }
2430
2431 /* Expand a call to one of the builtin math functions that operate on
2432    a floating-point argument and produce an integer result (ilogb, isinf,
2433 isnan, etc).
2434 Return 0 if a normal call should be emitted rather than expanding the
2435 function in-line. EXP is the expression that is a call to the builtin
2436 function; if convenient, the result should be placed in TARGET. */
2437
2438 static rtx
2439 expand_builtin_interclass_mathfn (tree exp, rtx target)
2440 {
2441 enum insn_code icode = CODE_FOR_nothing;
2442 rtx op0;
2443 tree fndecl = get_callee_fndecl (exp);
2444 machine_mode mode;
2445 tree arg;
2446
2447 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2448 return NULL_RTX;
2449
2450 arg = CALL_EXPR_ARG (exp, 0);
2451 icode = interclass_mathfn_icode (arg, fndecl);
2452 mode = TYPE_MODE (TREE_TYPE (arg));
2453
2454 if (icode != CODE_FOR_nothing)
2455 {
2456 struct expand_operand ops[1];
2457 rtx_insn *last = get_last_insn ();
2458 tree orig_arg = arg;
2459
2460 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2461 need to expand the argument again. This way, we will not perform
2462 	 side-effects more than once.  */
2463 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2464
2465 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2466
2467 if (mode != GET_MODE (op0))
2468 op0 = convert_to_mode (mode, op0, 0);
2469
2470 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2471 if (maybe_legitimize_operands (icode, 0, 1, ops)
2472 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2473 return ops[0].value;
2474
2475 delete_insns_since (last);
2476 CALL_EXPR_ARG (exp, 0) = orig_arg;
2477 }
2478
2479 return NULL_RTX;
2480 }
2481
2482 /* Expand a call to the builtin sincos math function.
2483 Return NULL_RTX if a normal call should be emitted rather than expanding the
2484 function in-line. EXP is the expression that is a call to the builtin
2485 function. */
2486
2487 static rtx
2488 expand_builtin_sincos (tree exp)
2489 {
2490 rtx op0, op1, op2, target1, target2;
2491 machine_mode mode;
2492 tree arg, sinp, cosp;
2493 int result;
2494 location_t loc = EXPR_LOCATION (exp);
2495 tree alias_type, alias_off;
2496
2497 if (!validate_arglist (exp, REAL_TYPE,
2498 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2499 return NULL_RTX;
2500
2501 arg = CALL_EXPR_ARG (exp, 0);
2502 sinp = CALL_EXPR_ARG (exp, 1);
2503 cosp = CALL_EXPR_ARG (exp, 2);
2504
2505 /* Make a suitable register to place result in. */
2506 mode = TYPE_MODE (TREE_TYPE (arg));
2507
2508 /* Check if sincos insn is available, otherwise emit the call. */
2509 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2510 return NULL_RTX;
2511
2512 target1 = gen_reg_rtx (mode);
2513 target2 = gen_reg_rtx (mode);
2514
2515 op0 = expand_normal (arg);
2516 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2517 alias_off = build_int_cst (alias_type, 0);
2518 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2519 sinp, alias_off));
2520 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2521 cosp, alias_off));
2522
2523 /* Compute into target1 and target2.
2524 Set TARGET to wherever the result comes back. */
2525 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2526 gcc_assert (result);
2527
2528 /* Move target1 and target2 to the memory locations indicated
2529 by op1 and op2. */
2530 emit_move_insn (op1, target1);
2531 emit_move_insn (op2, target2);
2532
2533 return const0_rtx;
2534 }
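
/* Illustratively: when the target provides a sincos insn, a call such as

     sincos (x, &s, &c);

   becomes a single two-output instruction plus two stores, instead of
   separate sin and cos libcalls.  */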
2535
2536 /* Expand a call to the internal cexpi builtin to the sincos math function.
2537 EXP is the expression that is a call to the builtin function; if convenient,
2538 the result should be placed in TARGET. */
2539
2540 static rtx
2541 expand_builtin_cexpi (tree exp, rtx target)
2542 {
2543 tree fndecl = get_callee_fndecl (exp);
2544 tree arg, type;
2545 machine_mode mode;
2546 rtx op0, op1, op2;
2547 location_t loc = EXPR_LOCATION (exp);
2548
2549 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2550 return NULL_RTX;
2551
2552 arg = CALL_EXPR_ARG (exp, 0);
2553 type = TREE_TYPE (arg);
2554 mode = TYPE_MODE (TREE_TYPE (arg));
2555
2556   /* Try expanding via a sincos optab; fall back to emitting a libcall
2557      to sincos or cexp.  We can rely on one of those being present, since
2558      cexpi is only generated when sincos or cexp is known to be available.  */
2559 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2560 {
2561 op1 = gen_reg_rtx (mode);
2562 op2 = gen_reg_rtx (mode);
2563
2564 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2565
2566 /* Compute into op1 and op2. */
2567 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2568 }
2569 else if (targetm.libc_has_function (function_sincos))
2570 {
2571 tree call, fn = NULL_TREE;
2572 tree top1, top2;
2573 rtx op1a, op2a;
2574
2575 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2576 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2577 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2578 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2579 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2580 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2581 else
2582 gcc_unreachable ();
2583
2584 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2585 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2586 op1a = copy_addr_to_reg (XEXP (op1, 0));
2587 op2a = copy_addr_to_reg (XEXP (op2, 0));
2588 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2589 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2590
2591 /* Make sure not to fold the sincos call again. */
2592 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2593 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2594 call, 3, arg, top1, top2));
2595 }
2596 else
2597 {
2598 tree call, fn = NULL_TREE, narg;
2599 tree ctype = build_complex_type (type);
2600
2601 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2602 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2603 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2604 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2605 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2606 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2607 else
2608 gcc_unreachable ();
2609
2610       /* If we don't have a decl for cexp, create one.  This is the
2611 	 friendliest fallback if the user calls __builtin_cexpi
2612 	 on a target without full C99 function support.  */
2613 if (fn == NULL_TREE)
2614 {
2615 tree fntype;
2616 const char *name = NULL;
2617
2618 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2619 name = "cexpf";
2620 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2621 name = "cexp";
2622 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2623 name = "cexpl";
2624
2625 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2626 fn = build_fn_decl (name, fntype);
2627 }
2628
2629 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2630 build_real (type, dconst0), arg);
2631
2632 /* Make sure not to fold the cexp call again. */
2633 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2634 return expand_expr (build_call_nary (ctype, call, 1, narg),
2635 target, VOIDmode, EXPAND_NORMAL);
2636 }
2637
2638 /* Now build the proper return type. */
2639 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2640 make_tree (TREE_TYPE (arg), op2),
2641 make_tree (TREE_TYPE (arg), op1)),
2642 target, VOIDmode, EXPAND_NORMAL);
2643 }
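
/* In short, __builtin_cexpi (x) is lowered, in order of preference, to a
   sincos insn, a sincos libcall, or (as a fallback) a call to cexp on the
   complex value 0.0 + x*i, whose result equals cos (x) + sin (x)*i by
   Euler's formula.  */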
2644
2645 /* Conveniently construct a function call expression. FNDECL names the
2646 function to be called, N is the number of arguments, and the "..."
2647    parameters are the argument expressions.  Unlike build_call_expr
2648 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2649
2650 static tree
2651 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2652 {
2653 va_list ap;
2654 tree fntype = TREE_TYPE (fndecl);
2655 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2656
2657 va_start (ap, n);
2658 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2659 va_end (ap);
2660 SET_EXPR_LOCATION (fn, loc);
2661 return fn;
2662 }
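
/* A typical use, as in the rounding fallbacks below, is

     call = build_call_nofold_loc (loc, fndecl, 1, arg);

   which yields a bare CALL_EXPR that later expansion will not re-fold.  */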
2663
2664 /* Expand a call to one of the builtin rounding functions gcc defines
2665 as an extension (lfloor and lceil). As these are gcc extensions we
2666 do not need to worry about setting errno to EDOM.
2667    If expanding via the optab fails, lower the expression to (int)(floor(x)).
2668 EXP is the expression that is a call to the builtin function;
2669 if convenient, the result should be placed in TARGET. */
2670
2671 static rtx
2672 expand_builtin_int_roundingfn (tree exp, rtx target)
2673 {
2674 convert_optab builtin_optab;
2675 rtx op0, tmp;
2676 rtx_insn *insns;
2677 tree fndecl = get_callee_fndecl (exp);
2678 enum built_in_function fallback_fn;
2679 tree fallback_fndecl;
2680 machine_mode mode;
2681 tree arg;
2682
2683 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2684 gcc_unreachable ();
2685
2686 arg = CALL_EXPR_ARG (exp, 0);
2687
2688 switch (DECL_FUNCTION_CODE (fndecl))
2689 {
2690 CASE_FLT_FN (BUILT_IN_ICEIL):
2691 CASE_FLT_FN (BUILT_IN_LCEIL):
2692 CASE_FLT_FN (BUILT_IN_LLCEIL):
2693 builtin_optab = lceil_optab;
2694 fallback_fn = BUILT_IN_CEIL;
2695 break;
2696
2697 CASE_FLT_FN (BUILT_IN_IFLOOR):
2698 CASE_FLT_FN (BUILT_IN_LFLOOR):
2699 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2700 builtin_optab = lfloor_optab;
2701 fallback_fn = BUILT_IN_FLOOR;
2702 break;
2703
2704 default:
2705 gcc_unreachable ();
2706 }
2707
2708 /* Make a suitable register to place result in. */
2709 mode = TYPE_MODE (TREE_TYPE (exp));
2710
2711 target = gen_reg_rtx (mode);
2712
2713 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2714 need to expand the argument again. This way, we will not perform
2715      side-effects more than once.  */
2716 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2717
2718 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2719
2720 start_sequence ();
2721
2722 /* Compute into TARGET. */
2723 if (expand_sfix_optab (target, op0, builtin_optab))
2724 {
2725 /* Output the entire sequence. */
2726 insns = get_insns ();
2727 end_sequence ();
2728 emit_insn (insns);
2729 return target;
2730 }
2731
2732 /* If we were unable to expand via the builtin, stop the sequence
2733 (without outputting the insns). */
2734 end_sequence ();
2735
2736 /* Fall back to floating point rounding optab. */
2737 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2738
2739 /* For non-C99 targets we may end up without a fallback fndecl here
2740 if the user called __builtin_lfloor directly. In this case emit
2741 a call to the floor/ceil variants nevertheless. This should result
2742      in the best user experience for targets without full C99 support.  */
2743 if (fallback_fndecl == NULL_TREE)
2744 {
2745 tree fntype;
2746 const char *name = NULL;
2747
2748 switch (DECL_FUNCTION_CODE (fndecl))
2749 {
2750 case BUILT_IN_ICEIL:
2751 case BUILT_IN_LCEIL:
2752 case BUILT_IN_LLCEIL:
2753 name = "ceil";
2754 break;
2755 case BUILT_IN_ICEILF:
2756 case BUILT_IN_LCEILF:
2757 case BUILT_IN_LLCEILF:
2758 name = "ceilf";
2759 break;
2760 case BUILT_IN_ICEILL:
2761 case BUILT_IN_LCEILL:
2762 case BUILT_IN_LLCEILL:
2763 name = "ceill";
2764 break;
2765 case BUILT_IN_IFLOOR:
2766 case BUILT_IN_LFLOOR:
2767 case BUILT_IN_LLFLOOR:
2768 name = "floor";
2769 break;
2770 case BUILT_IN_IFLOORF:
2771 case BUILT_IN_LFLOORF:
2772 case BUILT_IN_LLFLOORF:
2773 name = "floorf";
2774 break;
2775 case BUILT_IN_IFLOORL:
2776 case BUILT_IN_LFLOORL:
2777 case BUILT_IN_LLFLOORL:
2778 name = "floorl";
2779 break;
2780 default:
2781 gcc_unreachable ();
2782 }
2783
2784 fntype = build_function_type_list (TREE_TYPE (arg),
2785 TREE_TYPE (arg), NULL_TREE);
2786 fallback_fndecl = build_fn_decl (name, fntype);
2787 }
2788
2789 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2790
2791 tmp = expand_normal (exp);
2792 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2793
2794 /* Truncate the result of floating point optab to integer
2795 via expand_fix (). */
2796 target = gen_reg_rtx (mode);
2797 expand_fix (target, tmp, 0);
2798
2799 return target;
2800 }
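
/* So on a target without an lceil/lfloor optab handler, a call such as
   lfloor (x) is effectively expanded as (long) floor (x): first the floor
   call, then an expand_fix truncation.  (Illustrative sketch.)  */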
2801
2802 /* Expand a call to one of the builtin math functions doing integer
2803 conversion (lrint).
2804 Return 0 if a normal call should be emitted rather than expanding the
2805 function in-line. EXP is the expression that is a call to the builtin
2806 function; if convenient, the result should be placed in TARGET. */
2807
2808 static rtx
2809 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2810 {
2811 convert_optab builtin_optab;
2812 rtx op0;
2813 rtx_insn *insns;
2814 tree fndecl = get_callee_fndecl (exp);
2815 tree arg;
2816 machine_mode mode;
2817 enum built_in_function fallback_fn = BUILT_IN_NONE;
2818
2819 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2820 gcc_unreachable ();
2821
2822 arg = CALL_EXPR_ARG (exp, 0);
2823
2824 switch (DECL_FUNCTION_CODE (fndecl))
2825 {
2826 CASE_FLT_FN (BUILT_IN_IRINT):
2827 fallback_fn = BUILT_IN_LRINT;
2828 /* FALLTHRU */
2829 CASE_FLT_FN (BUILT_IN_LRINT):
2830 CASE_FLT_FN (BUILT_IN_LLRINT):
2831 builtin_optab = lrint_optab;
2832 break;
2833
2834 CASE_FLT_FN (BUILT_IN_IROUND):
2835 fallback_fn = BUILT_IN_LROUND;
2836 /* FALLTHRU */
2837 CASE_FLT_FN (BUILT_IN_LROUND):
2838 CASE_FLT_FN (BUILT_IN_LLROUND):
2839 builtin_optab = lround_optab;
2840 break;
2841
2842 default:
2843 gcc_unreachable ();
2844 }
2845
2846 /* There's no easy way to detect the case we need to set EDOM. */
2847 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2848 return NULL_RTX;
2849
2850 /* Make a suitable register to place result in. */
2851 mode = TYPE_MODE (TREE_TYPE (exp));
2852
2853   /* If errno handling is not required, try to expand the operation inline.  */
2854 if (!flag_errno_math)
2855 {
2856 rtx result = gen_reg_rtx (mode);
2857
2858 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2859 need to expand the argument again. This way, we will not perform
2860 	 side-effects more than once.  */
2861 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2862
2863 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2864
2865 start_sequence ();
2866
2867 if (expand_sfix_optab (result, op0, builtin_optab))
2868 {
2869 /* Output the entire sequence. */
2870 insns = get_insns ();
2871 end_sequence ();
2872 emit_insn (insns);
2873 return result;
2874 }
2875
2876 /* If we were unable to expand via the builtin, stop the sequence
2877 (without outputting the insns) and call to the library function
2878 with the stabilized argument list. */
2879 end_sequence ();
2880 }
2881
2882 if (fallback_fn != BUILT_IN_NONE)
2883 {
2884 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2885 targets, (int) round (x) should never be transformed into
2886 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2887 a call to lround in the hope that the target provides at least some
2888 	 C99 functions.  This should result in the best user experience for
2889 	 targets without full C99 support.  */
2890 tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
2891 fallback_fn, 0);
2892
2893 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2894 fallback_fndecl, 1, arg);
2895
2896 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2897 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2898 return convert_to_mode (mode, target, 0);
2899 }
2900
2901 return expand_call (exp, target, target == const0_rtx);
2902 }
2903
2904 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2905 a normal call should be emitted rather than expanding the function
2906 in-line. EXP is the expression that is a call to the builtin
2907 function; if convenient, the result should be placed in TARGET. */
2908
2909 static rtx
2910 expand_builtin_powi (tree exp, rtx target)
2911 {
2912 tree arg0, arg1;
2913 rtx op0, op1;
2914 machine_mode mode;
2915 machine_mode mode2;
2916
2917 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2918 return NULL_RTX;
2919
2920 arg0 = CALL_EXPR_ARG (exp, 0);
2921 arg1 = CALL_EXPR_ARG (exp, 1);
2922 mode = TYPE_MODE (TREE_TYPE (exp));
2923
2924 /* Emit a libcall to libgcc. */
2925
2926 /* Mode of the 2nd argument must match that of an int. */
2927 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2928
2929 if (target == NULL_RTX)
2930 target = gen_reg_rtx (mode);
2931
2932 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2933 if (GET_MODE (op0) != mode)
2934 op0 = convert_to_mode (mode, op0, 0);
2935 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2936 if (GET_MODE (op1) != mode2)
2937 op1 = convert_to_mode (mode2, op1, 0);
2938
2939 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2940 target, LCT_CONST, mode, 2,
2941 op0, mode, op1, mode2);
2942
2943 return target;
2944 }
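
/* E.g. __builtin_powi (x, n) in DFmode becomes a libcall to the libgcc
   routine bound to powi_optab (conventionally __powidf2), with N passed
   in the target's int mode.  */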
2945
2946 /* Expand expression EXP which is a call to the strlen builtin. Return
2947    NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
2948 try to get the result in TARGET, if convenient. */
2949
2950 static rtx
2951 expand_builtin_strlen (tree exp, rtx target,
2952 machine_mode target_mode)
2953 {
2954 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2955 return NULL_RTX;
2956 else
2957 {
2958 struct expand_operand ops[4];
2959 rtx pat;
2960 tree len;
2961 tree src = CALL_EXPR_ARG (exp, 0);
2962 rtx src_reg;
2963 rtx_insn *before_strlen;
2964 machine_mode insn_mode = target_mode;
2965 enum insn_code icode = CODE_FOR_nothing;
2966 unsigned int align;
2967
2968 /* If the length can be computed at compile-time, return it. */
2969 len = c_strlen (src, 0);
2970 if (len)
2971 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2972
2973 /* If the length can be computed at compile-time and is constant
2974 integer, but there are side-effects in src, evaluate
2975 src for side-effects, then return len.
2976 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2977 can be optimized into: i++; x = 3; */
2978 len = c_strlen (src, 1);
2979 if (len && TREE_CODE (len) == INTEGER_CST)
2980 {
2981 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2982 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2983 }
2984
2985 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2986
2987 /* If SRC is not a pointer type, don't do this operation inline. */
2988 if (align == 0)
2989 return NULL_RTX;
2990
2991 /* Bail out if we can't compute strlen in the right mode. */
2992 while (insn_mode != VOIDmode)
2993 {
2994 icode = optab_handler (strlen_optab, insn_mode);
2995 if (icode != CODE_FOR_nothing)
2996 break;
2997
2998 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
2999 }
3000 if (insn_mode == VOIDmode)
3001 return NULL_RTX;
3002
3003 /* Make a place to hold the source address. We will not expand
3004 the actual source until we are sure that the expansion will
3005 not fail -- there are trees that cannot be expanded twice. */
3006 src_reg = gen_reg_rtx (Pmode);
3007
3008 /* Mark the beginning of the strlen sequence so we can emit the
3009 source operand later. */
3010 before_strlen = get_last_insn ();
3011
3012 create_output_operand (&ops[0], target, insn_mode);
3013 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3014 create_integer_operand (&ops[2], 0);
3015 create_integer_operand (&ops[3], align);
3016 if (!maybe_expand_insn (icode, 4, ops))
3017 return NULL_RTX;
3018
3019 /* Now that we are assured of success, expand the source. */
3020 start_sequence ();
3021 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3022 if (pat != src_reg)
3023 {
3024 #ifdef POINTERS_EXTEND_UNSIGNED
3025 if (GET_MODE (pat) != Pmode)
3026 pat = convert_to_mode (Pmode, pat,
3027 POINTERS_EXTEND_UNSIGNED);
3028 #endif
3029 emit_move_insn (src_reg, pat);
3030 }
3031 pat = get_insns ();
3032 end_sequence ();
3033
3034 if (before_strlen)
3035 emit_insn_after (pat, before_strlen);
3036 else
3037 emit_insn_before (pat, get_insns ());
3038
3039 /* Return the value in the proper mode for this function. */
3040 if (GET_MODE (ops[0].value) == target_mode)
3041 target = ops[0].value;
3042 else if (target != 0)
3043 convert_move (target, ops[0].value, 0);
3044 else
3045 target = convert_to_mode (target_mode, ops[0].value, 0);
3046
3047 return target;
3048 }
3049 }
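
/* E.g. x = strlen ("hello") is reduced by the c_strlen path above to
   x = 5, with no code emitted for the call at all.  */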
3050
3051 /* Callback routine for store_by_pieces.  Read GET_MODE_SIZE (MODE)
3052 bytes from constant string DATA + OFFSET and return it as target
3053 constant. */
3054
3055 static rtx
3056 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3057 machine_mode mode)
3058 {
3059 const char *str = (const char *) data;
3060
3061 gcc_assert (offset >= 0
3062 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3063 <= strlen (str) + 1));
3064
3065 return c_readstr (str + offset, mode);
3066 }
3067
3068 /* LEN specifies the length of the block of the memcpy/memset operation.
3069    Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3070    In some cases we can make a very likely guess at the maximum size,
3071    which we then store in PROBABLE_MAX_SIZE.  */
3072
3073 static void
3074 determine_block_size (tree len, rtx len_rtx,
3075 unsigned HOST_WIDE_INT *min_size,
3076 unsigned HOST_WIDE_INT *max_size,
3077 unsigned HOST_WIDE_INT *probable_max_size)
3078 {
3079 if (CONST_INT_P (len_rtx))
3080 {
3081 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3082 return;
3083 }
3084 else
3085 {
3086 wide_int min, max;
3087 enum value_range_type range_type = VR_UNDEFINED;
3088
3089 /* Determine bounds from the type. */
3090 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3091 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3092 else
3093 *min_size = 0;
3094 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3095 *probable_max_size = *max_size
3096 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3097 else
3098 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3099
3100 if (TREE_CODE (len) == SSA_NAME)
3101 range_type = get_range_info (len, &min, &max);
3102 if (range_type == VR_RANGE)
3103 {
3104 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3105 *min_size = min.to_uhwi ();
3106 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3107 *probable_max_size = *max_size = max.to_uhwi ();
3108 }
3109 else if (range_type == VR_ANTI_RANGE)
3110 {
3111 	  /* An anti-range 0...N lets us determine that the minimal size is N+1.  */
3112 if (min == 0)
3113 {
3114 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3115 *min_size = max.to_uhwi () + 1;
3116 }
3117 /* Code like
3118
3119 int n;
3120 if (n < 100)
3121 memcpy (a, b, n)
3122
3123 	     produces an anti-range allowing negative values of N.  We can
3124 	     still use that information to guess that N is not negative.  */
3126 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3127 *probable_max_size = min.to_uhwi () - 1;
3128 }
3129 }
3130 gcc_checking_assert (*max_size <=
3131 (unsigned HOST_WIDE_INT)
3132 GET_MODE_MASK (GET_MODE (len_rtx)));
3133 }
3134
3135 /* Helper function to do the actual work for expand_builtin_memcpy. */
3136
3137 static rtx
3138 expand_builtin_memcpy_args (tree dest, tree src, tree len, rtx target, tree exp)
3139 {
3140 const char *src_str;
3141 unsigned int src_align = get_pointer_alignment (src);
3142 unsigned int dest_align = get_pointer_alignment (dest);
3143 rtx dest_mem, src_mem, dest_addr, len_rtx;
3144 HOST_WIDE_INT expected_size = -1;
3145 unsigned int expected_align = 0;
3146 unsigned HOST_WIDE_INT min_size;
3147 unsigned HOST_WIDE_INT max_size;
3148 unsigned HOST_WIDE_INT probable_max_size;
3149
3150 /* If DEST is not a pointer type, call the normal function. */
3151 if (dest_align == 0)
3152 return NULL_RTX;
3153
3154   /* Likewise, if SRC is not a pointer type, don't do this
3155 operation in-line. */
3156 if (src_align == 0)
3157 return NULL_RTX;
3158
3159 if (currently_expanding_gimple_stmt)
3160 stringop_block_profile (currently_expanding_gimple_stmt,
3161 &expected_align, &expected_size);
3162
3163 if (expected_align < dest_align)
3164 expected_align = dest_align;
3165 dest_mem = get_memory_rtx (dest, len);
3166 set_mem_align (dest_mem, dest_align);
3167 len_rtx = expand_normal (len);
3168 determine_block_size (len, len_rtx, &min_size, &max_size,
3169 &probable_max_size);
3170 src_str = c_getstr (src);
3171
3172 /* If SRC is a string constant and block move would be done
3173 by pieces, we can avoid loading the string from memory
3174      and instead store only the computed constants.  */
3175 if (src_str
3176 && CONST_INT_P (len_rtx)
3177 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3178 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3179 CONST_CAST (char *, src_str),
3180 dest_align, false))
3181 {
3182 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3183 builtin_memcpy_read_str,
3184 CONST_CAST (char *, src_str),
3185 dest_align, false, 0);
3186 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3187 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3188 return dest_mem;
3189 }
3190
3191 src_mem = get_memory_rtx (src, len);
3192 set_mem_align (src_mem, src_align);
3193
3194 /* Copy word part most expediently. */
3195 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3196 CALL_EXPR_TAILCALL (exp)
3197 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3198 expected_align, expected_size,
3199 min_size, max_size, probable_max_size);
3200
3201 if (dest_addr == 0)
3202 {
3203 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3204 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3205 }
3206
3207 return dest_addr;
3208 }
3209
3210 /* Expand a call EXP to the memcpy builtin.
3211    Return NULL_RTX if we failed; the caller should emit a normal call.
3212    Otherwise try to get the result in TARGET, if convenient (and in
3213 mode MODE if that's convenient). */
3214
3215 static rtx
3216 expand_builtin_memcpy (tree exp, rtx target)
3217 {
3218 if (!validate_arglist (exp,
3219 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3220 return NULL_RTX;
3221 else
3222 {
3223 tree dest = CALL_EXPR_ARG (exp, 0);
3224 tree src = CALL_EXPR_ARG (exp, 1);
3225 tree len = CALL_EXPR_ARG (exp, 2);
3226 return expand_builtin_memcpy_args (dest, src, len, target, exp);
3227 }
3228 }
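
/* As an illustration of the store-by-pieces path above, a call such as

     memcpy (buf, "ab", 3);

   can be emitted as immediate stores of the bytes 'a', 'b' and '\0'
   rather than as a library call, when the target allows it.  */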
3229
3230 /* Expand an instrumented call EXP to the memcpy builtin.
3231    Return NULL_RTX if we failed; the caller should emit a normal call.
3232    Otherwise try to get the result in TARGET, if convenient (and in
3233 mode MODE if that's convenient). */
3234
3235 static rtx
3236 expand_builtin_memcpy_with_bounds (tree exp, rtx target)
3237 {
3238 if (!validate_arglist (exp,
3239 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3240 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3241 INTEGER_TYPE, VOID_TYPE))
3242 return NULL_RTX;
3243 else
3244 {
3245 tree dest = CALL_EXPR_ARG (exp, 0);
3246 tree src = CALL_EXPR_ARG (exp, 2);
3247 tree len = CALL_EXPR_ARG (exp, 4);
3248 rtx res = expand_builtin_memcpy_args (dest, src, len, target, exp);
3249
3250 /* Return src bounds with the result. */
3251 if (res)
3252 {
3253 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3254 expand_normal (CALL_EXPR_ARG (exp, 1)));
3255 res = chkp_join_splitted_slot (res, bnd);
3256 }
3257 return res;
3258 }
3259 }
3260
3261 /* Expand a call EXP to the mempcpy builtin.
3262    Return NULL_RTX if we failed; the caller should emit a normal call.
3263    Otherwise try to get the result in TARGET, if convenient (and in
3264 mode MODE if that's convenient). If ENDP is 0 return the
3265 destination pointer, if ENDP is 1 return the end pointer ala
3266 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3267 stpcpy. */
3268
3269 static rtx
3270 expand_builtin_mempcpy (tree exp, rtx target, machine_mode mode)
3271 {
3272 if (!validate_arglist (exp,
3273 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3274 return NULL_RTX;
3275 else
3276 {
3277 tree dest = CALL_EXPR_ARG (exp, 0);
3278 tree src = CALL_EXPR_ARG (exp, 1);
3279 tree len = CALL_EXPR_ARG (exp, 2);
3280 return expand_builtin_mempcpy_args (dest, src, len,
3281 target, mode, /*endp=*/ 1,
3282 exp);
3283 }
3284 }
3285
3286 /* Expand an instrumented call EXP to the mempcpy builtin.
3287    Return NULL_RTX if we failed; the caller should emit a normal call.
3288    Otherwise try to get the result in TARGET, if convenient (and in
3289 mode MODE if that's convenient). */
3290
3291 static rtx
3292 expand_builtin_mempcpy_with_bounds (tree exp, rtx target, machine_mode mode)
3293 {
3294 if (!validate_arglist (exp,
3295 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3296 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3297 INTEGER_TYPE, VOID_TYPE))
3298 return NULL_RTX;
3299 else
3300 {
3301 tree dest = CALL_EXPR_ARG (exp, 0);
3302 tree src = CALL_EXPR_ARG (exp, 2);
3303 tree len = CALL_EXPR_ARG (exp, 4);
3304 rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
3305 mode, 1, exp);
3306
3307 /* Return src bounds with the result. */
3308 if (res)
3309 {
3310 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3311 expand_normal (CALL_EXPR_ARG (exp, 1)));
3312 res = chkp_join_splitted_slot (res, bnd);
3313 }
3314 return res;
3315 }
3316 }
3317
3318 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3319 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3320 so that this can also be called without constructing an actual CALL_EXPR.
3321 The other arguments and return value are the same as for
3322 expand_builtin_mempcpy. */
3323
3324 static rtx
3325 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3326 rtx target, machine_mode mode, int endp,
3327 tree orig_exp)
3328 {
3329 tree fndecl = get_callee_fndecl (orig_exp);
3330
3331 /* If return value is ignored, transform mempcpy into memcpy. */
3332 if (target == const0_rtx
3333 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
3334 && builtin_decl_implicit_p (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP))
3335 {
3336 tree fn = builtin_decl_implicit (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP);
3337 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3338 dest, src, len);
3339 return expand_expr (result, target, mode, EXPAND_NORMAL);
3340 }
3341 else if (target == const0_rtx
3342 && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3343 {
3344 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3345 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3346 dest, src, len);
3347 return expand_expr (result, target, mode, EXPAND_NORMAL);
3348 }
3349 else
3350 {
3351 const char *src_str;
3352 unsigned int src_align = get_pointer_alignment (src);
3353 unsigned int dest_align = get_pointer_alignment (dest);
3354 rtx dest_mem, src_mem, len_rtx;
3355
3356 /* If either SRC or DEST is not a pointer type, don't do this
3357 operation in-line. */
3358 if (dest_align == 0 || src_align == 0)
3359 return NULL_RTX;
3360
3361 /* If LEN is not constant, call the normal function. */
3362 if (! tree_fits_uhwi_p (len))
3363 return NULL_RTX;
3364
3365 len_rtx = expand_normal (len);
3366 src_str = c_getstr (src);
3367
3368 /* If SRC is a string constant and block move would be done
3369 by pieces, we can avoid loading the string from memory
3370 and only store the computed constants. */
3371 if (src_str
3372 && CONST_INT_P (len_rtx)
3373 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3374 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3375 CONST_CAST (char *, src_str),
3376 dest_align, false))
3377 {
3378 dest_mem = get_memory_rtx (dest, len);
3379 set_mem_align (dest_mem, dest_align);
3380 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3381 builtin_memcpy_read_str,
3382 CONST_CAST (char *, src_str),
3383 dest_align, false, endp);
3384 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3385 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3386 return dest_mem;
3387 }
3388
3389 if (CONST_INT_P (len_rtx)
3390 && can_move_by_pieces (INTVAL (len_rtx),
3391 MIN (dest_align, src_align)))
3392 {
3393 dest_mem = get_memory_rtx (dest, len);
3394 set_mem_align (dest_mem, dest_align);
3395 src_mem = get_memory_rtx (src, len);
3396 set_mem_align (src_mem, src_align);
3397 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3398 MIN (dest_align, src_align), endp);
3399 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3400 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3401 return dest_mem;
3402 }
3403
3404 return NULL_RTX;
3405 }
3406 }
3407
3408 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3409 we failed; the caller should emit a normal call, otherwise try to
3410 get the result in TARGET, if convenient. If ENDP is 0 return the
3411 destination pointer, if ENDP is 1 return the end pointer ala
3412 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3413 stpcpy. */
3414
3415 static rtx
3416 expand_movstr (tree dest, tree src, rtx target, int endp)
3417 {
3418 struct expand_operand ops[3];
3419 rtx dest_mem;
3420 rtx src_mem;
3421
3422 if (!targetm.have_movstr ())
3423 return NULL_RTX;
3424
3425 dest_mem = get_memory_rtx (dest, NULL);
3426 src_mem = get_memory_rtx (src, NULL);
3427 if (!endp)
3428 {
3429 target = force_reg (Pmode, XEXP (dest_mem, 0));
3430 dest_mem = replace_equiv_address (dest_mem, target);
3431 }
3432
3433 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3434 create_fixed_operand (&ops[1], dest_mem);
3435 create_fixed_operand (&ops[2], src_mem);
3436 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3437 return NULL_RTX;
3438
3439 if (endp && target != const0_rtx)
3440 {
3441 target = ops[0].value;
3442 /* movstr is supposed to set end to the address of the NUL
3443 terminator. If the caller requested a mempcpy-like return value,
3444 adjust it. */
3445 if (endp == 1)
3446 {
3447 rtx tem = plus_constant (GET_MODE (target),
3448 gen_lowpart (GET_MODE (target), target), 1);
3449 emit_move_insn (target, force_operand (tem, NULL_RTX));
3450 }
3451 }
3452 return target;
3453 }
3454
3455 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3456 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3457 try to get the result in TARGET, if convenient (and in mode MODE if that's
3458 convenient). */
3459
3460 static rtx
3461 expand_builtin_strcpy (tree exp, rtx target)
3462 {
3463 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3464 {
3465 tree dest = CALL_EXPR_ARG (exp, 0);
3466 tree src = CALL_EXPR_ARG (exp, 1);
3467 return expand_builtin_strcpy_args (dest, src, target);
3468 }
3469 return NULL_RTX;
3470 }
3471
3472 /* Helper function to do the actual work for expand_builtin_strcpy. The
3473 arguments to the builtin_strcpy call DEST and SRC are broken out
3474 so that this can also be called without constructing an actual CALL_EXPR.
3475 The other arguments and return value are the same as for
3476 expand_builtin_strcpy. */
3477
3478 static rtx
3479 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3480 {
3481 return expand_movstr (dest, src, target, /*endp=*/0);
3482 }
3483
3484 /* Expand a call EXP to the stpcpy builtin.
3485 Return NULL_RTX if we failed; the caller should emit a normal call,
3486 otherwise try to get the result in TARGET, if convenient (and in
3487 mode MODE if that's convenient). */
3488
3489 static rtx
3490 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3491 {
3492 tree dst, src;
3493 location_t loc = EXPR_LOCATION (exp);
3494
3495 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3496 return NULL_RTX;
3497
3498 dst = CALL_EXPR_ARG (exp, 0);
3499 src = CALL_EXPR_ARG (exp, 1);
3500
3501 /* If return value is ignored, transform stpcpy into strcpy. */
3502 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3503 {
3504 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3505 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3506 return expand_expr (result, target, mode, EXPAND_NORMAL);
3507 }
3508 else
3509 {
3510 tree len, lenp1;
3511 rtx ret;
3512
3513 /* Ensure we get an actual string whose length can be evaluated at
3514 compile-time, not an expression containing a string. This is
3515 because the latter will potentially produce pessimized code
3516 when used to compute the return value. */
3517 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3518 return expand_movstr (dst, src, target, /*endp=*/2);
3519
3520 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3521 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3522 target, mode, /*endp=*/2,
3523 exp);
3524
3525 if (ret)
3526 return ret;
3527
3528 if (TREE_CODE (len) == INTEGER_CST)
3529 {
3530 rtx len_rtx = expand_normal (len);
3531
3532 if (CONST_INT_P (len_rtx))
3533 {
3534 ret = expand_builtin_strcpy_args (dst, src, target);
3535
3536 if (ret)
3537 {
3538 if (! target)
3539 {
3540 if (mode != VOIDmode)
3541 target = gen_reg_rtx (mode);
3542 else
3543 target = gen_reg_rtx (GET_MODE (ret));
3544 }
3545 if (GET_MODE (target) != GET_MODE (ret))
3546 ret = gen_lowpart (GET_MODE (target), ret);
3547
3548 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3549 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3550 gcc_assert (ret);
3551
3552 return target;
3553 }
3554 }
3555 }
3556
3557 return expand_movstr (dst, src, target, /*endp=*/2);
3558 }
3559 }
3560
3561 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3562 bytes from constant string DATA + OFFSET and return it as target
3563 constant. */
3564
3565 rtx
3566 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3567 machine_mode mode)
3568 {
3569 const char *str = (const char *) data;
3570
3571 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3572 return const0_rtx;
3573
3574 return c_readstr (str + offset, mode);
3575 }
3576
3577 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3578 NULL_RTX if we failed; the caller should emit a normal call. */
3579
3580 static rtx
3581 expand_builtin_strncpy (tree exp, rtx target)
3582 {
3583 location_t loc = EXPR_LOCATION (exp);
3584
3585 if (validate_arglist (exp,
3586 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3587 {
3588 tree dest = CALL_EXPR_ARG (exp, 0);
3589 tree src = CALL_EXPR_ARG (exp, 1);
3590 tree len = CALL_EXPR_ARG (exp, 2);
3591 tree slen = c_strlen (src, 1);
3592
3593 /* We must be passed a constant LEN and a SRC of constant length. */
3594 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3595 return NULL_RTX;
3596
3597 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3598
3599 /* We're required to pad with trailing zeros if the requested
3600 len is greater than strlen(s2)+1. In that case try to
3601 use store_by_pieces; if it fails, punt. */
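/* For example (illustrative): strncpy (buf, "ab", 5) must store
   'a', 'b', '\0', '\0', '\0'; once OFFSET moves past the string,
   builtin_strncpy_read_str above returns zeros, so store_by_pieces
   emits the required padding as well.  */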
3602 if (tree_int_cst_lt (slen, len))
3603 {
3604 unsigned int dest_align = get_pointer_alignment (dest);
3605 const char *p = c_getstr (src);
3606 rtx dest_mem;
3607
3608 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3609 || !can_store_by_pieces (tree_to_uhwi (len),
3610 builtin_strncpy_read_str,
3611 CONST_CAST (char *, p),
3612 dest_align, false))
3613 return NULL_RTX;
3614
3615 dest_mem = get_memory_rtx (dest, len);
3616 store_by_pieces (dest_mem, tree_to_uhwi (len),
3617 builtin_strncpy_read_str,
3618 CONST_CAST (char *, p), dest_align, false, 0);
3619 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3620 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3621 return dest_mem;
3622 }
3623 }
3624 return NULL_RTX;
3625 }
3626
3627 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3628 bytes from constant string DATA + OFFSET and return it as target
3629 constant. */
3630
3631 rtx
3632 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3633 machine_mode mode)
3634 {
3635 const char *c = (const char *) data;
3636 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3637
3638 memset (p, *c, GET_MODE_SIZE (mode));
3639
3640 return c_readstr (p, mode);
3641 }
3642
3643 /* Callback routine for store_by_pieces. Return the RTL of a register
3644 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3645 char value given in the RTL register data. For example, if mode is
3646 4 bytes wide, return the RTL for 0x01010101*data. */
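/* A brief sketch of the broadcast trick used below (illustrative):
   with a 4-byte mode, multiplying the zero-extended byte 0xAB by the
   coefficient 0x01010101 yields 0xABABABAB; the coefficient itself is
   obtained by c_readstr on a buffer filled with 0x01 bytes.  */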
3647
3648 static rtx
3649 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3650 machine_mode mode)
3651 {
3652 rtx target, coeff;
3653 size_t size;
3654 char *p;
3655
3656 size = GET_MODE_SIZE (mode);
3657 if (size == 1)
3658 return (rtx) data;
3659
3660 p = XALLOCAVEC (char, size);
3661 memset (p, 1, size);
3662 coeff = c_readstr (p, mode);
3663
3664 target = convert_to_mode (mode, (rtx) data, 1);
3665 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3666 return force_reg (mode, target);
3667 }
3668
3669 /* Expand expression EXP, which is a call to the memset builtin. Return
3670 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3671 try to get the result in TARGET, if convenient (and in mode MODE if that's
3672 convenient). */
3673
3674 static rtx
3675 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
3676 {
3677 if (!validate_arglist (exp,
3678 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3679 return NULL_RTX;
3680 else
3681 {
3682 tree dest = CALL_EXPR_ARG (exp, 0);
3683 tree val = CALL_EXPR_ARG (exp, 1);
3684 tree len = CALL_EXPR_ARG (exp, 2);
3685 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3686 }
3687 }
3688
3689 /* Expand expression EXP, which is an instrumented call to the memset builtin.
3690 Return NULL_RTX if we failed; the caller should emit a normal call, otherwise
3691 try to get the result in TARGET, if convenient (and in mode MODE if that's
3692 convenient). */
3693
3694 static rtx
3695 expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
3696 {
3697 if (!validate_arglist (exp,
3698 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3699 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3700 return NULL_RTX;
3701 else
3702 {
3703 tree dest = CALL_EXPR_ARG (exp, 0);
3704 tree val = CALL_EXPR_ARG (exp, 2);
3705 tree len = CALL_EXPR_ARG (exp, 3);
3706 rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
3707
3708 /* Return src bounds with the result. */
3709 if (res)
3710 {
3711 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3712 expand_normal (CALL_EXPR_ARG (exp, 1)));
3713 res = chkp_join_splitted_slot (res, bnd);
3714 }
3715 return res;
3716 }
3717 }
3718
3719 /* Helper function to do the actual work for expand_builtin_memset. The
3720 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3721 so that this can also be called without constructing an actual CALL_EXPR.
3722 The other arguments and return value are the same as for
3723 expand_builtin_memset. */
3724
3725 static rtx
3726 expand_builtin_memset_args (tree dest, tree val, tree len,
3727 rtx target, machine_mode mode, tree orig_exp)
3728 {
3729 tree fndecl, fn;
3730 enum built_in_function fcode;
3731 machine_mode val_mode;
3732 char c;
3733 unsigned int dest_align;
3734 rtx dest_mem, dest_addr, len_rtx;
3735 HOST_WIDE_INT expected_size = -1;
3736 unsigned int expected_align = 0;
3737 unsigned HOST_WIDE_INT min_size;
3738 unsigned HOST_WIDE_INT max_size;
3739 unsigned HOST_WIDE_INT probable_max_size;
3740
3741 dest_align = get_pointer_alignment (dest);
3742
3743 /* If DEST is not a pointer type, don't do this operation in-line. */
3744 if (dest_align == 0)
3745 return NULL_RTX;
3746
3747 if (currently_expanding_gimple_stmt)
3748 stringop_block_profile (currently_expanding_gimple_stmt,
3749 &expected_align, &expected_size);
3750
3751 if (expected_align < dest_align)
3752 expected_align = dest_align;
3753
3754 /* If the LEN parameter is zero, return DEST. */
3755 if (integer_zerop (len))
3756 {
3757 /* Evaluate and ignore VAL in case it has side-effects. */
3758 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3759 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3760 }
3761
3762 /* Stabilize the arguments in case we fail. */
3763 dest = builtin_save_expr (dest);
3764 val = builtin_save_expr (val);
3765 len = builtin_save_expr (len);
3766
3767 len_rtx = expand_normal (len);
3768 determine_block_size (len, len_rtx, &min_size, &max_size,
3769 &probable_max_size);
3770 dest_mem = get_memory_rtx (dest, len);
3771 val_mode = TYPE_MODE (unsigned_char_type_node);
3772
3773 if (TREE_CODE (val) != INTEGER_CST)
3774 {
3775 rtx val_rtx;
3776
3777 val_rtx = expand_normal (val);
3778 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3779
3780 /* Assume that we can memset by pieces if we can store
3781 the coefficients by pieces (in the required modes).
3782 We can't pass builtin_memset_gen_str as that emits RTL. */
3783 c = 1;
3784 if (tree_fits_uhwi_p (len)
3785 && can_store_by_pieces (tree_to_uhwi (len),
3786 builtin_memset_read_str, &c, dest_align,
3787 true))
3788 {
3789 val_rtx = force_reg (val_mode, val_rtx);
3790 store_by_pieces (dest_mem, tree_to_uhwi (len),
3791 builtin_memset_gen_str, val_rtx, dest_align,
3792 true, 0);
3793 }
3794 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3795 dest_align, expected_align,
3796 expected_size, min_size, max_size,
3797 probable_max_size))
3798 goto do_libcall;
3799
3800 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3801 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3802 return dest_mem;
3803 }
3804
3805 if (target_char_cast (val, &c))
3806 goto do_libcall;
3807
3808 if (c)
3809 {
3810 if (tree_fits_uhwi_p (len)
3811 && can_store_by_pieces (tree_to_uhwi (len),
3812 builtin_memset_read_str, &c, dest_align,
3813 true))
3814 store_by_pieces (dest_mem, tree_to_uhwi (len),
3815 builtin_memset_read_str, &c, dest_align, true, 0);
3816 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3817 gen_int_mode (c, val_mode),
3818 dest_align, expected_align,
3819 expected_size, min_size, max_size,
3820 probable_max_size))
3821 goto do_libcall;
3822
3823 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3824 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3825 return dest_mem;
3826 }
3827
3828 set_mem_align (dest_mem, dest_align);
3829 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3830 CALL_EXPR_TAILCALL (orig_exp)
3831 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3832 expected_align, expected_size,
3833 min_size, max_size,
3834 probable_max_size);
3835
3836 if (dest_addr == 0)
3837 {
3838 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3839 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3840 }
3841
3842 return dest_addr;
3843
3844 do_libcall:
3845 fndecl = get_callee_fndecl (orig_exp);
3846 fcode = DECL_FUNCTION_CODE (fndecl);
3847 if (fcode == BUILT_IN_MEMSET
3848 || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
3849 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3850 dest, val, len);
3851 else if (fcode == BUILT_IN_BZERO)
3852 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3853 dest, len);
3854 else
3855 gcc_unreachable ();
3856 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3857 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3858 return expand_call (fn, target, target == const0_rtx);
3859 }
3860
3861 /* Expand expression EXP, which is a call to the bzero builtin. Return
3862 NULL_RTX if we failed; the caller should emit a normal call. */
3863
3864 static rtx
3865 expand_builtin_bzero (tree exp)
3866 {
3867 tree dest, size;
3868 location_t loc = EXPR_LOCATION (exp);
3869
3870 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3871 return NULL_RTX;
3872
3873 dest = CALL_EXPR_ARG (exp, 0);
3874 size = CALL_EXPR_ARG (exp, 1);
3875
3876 /* New argument list transforming bzero(ptr x, int y) to
3877 memset(ptr x, int 0, size_t y). This is done so that
3878 if it isn't expanded inline, we fall back to
3879 calling bzero instead of memset. */
3880
3881 return expand_builtin_memset_args (dest, integer_zero_node,
3882 fold_convert_loc (loc,
3883 size_type_node, size),
3884 const0_rtx, VOIDmode, exp);
3885 }
3886
3887 /* Try to expand cmpstr operation ICODE with the given operands.
3888 Return the result rtx on success, otherwise return null. */
3889
3890 static rtx
3891 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
3892 HOST_WIDE_INT align)
3893 {
3894 machine_mode insn_mode = insn_data[icode].operand[0].mode;
3895
3896 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
3897 target = NULL_RTX;
3898
3899 struct expand_operand ops[4];
3900 create_output_operand (&ops[0], target, insn_mode);
3901 create_fixed_operand (&ops[1], arg1_rtx);
3902 create_fixed_operand (&ops[2], arg2_rtx);
3903 create_integer_operand (&ops[3], align);
3904 if (maybe_expand_insn (icode, 4, ops))
3905 return ops[0].value;
3906 return NULL_RTX;
3907 }
3908
3909 /* Try to expand cmpstrn or cmpmem operation ICODE with the given operands.
3910 ARG3_TYPE is the type of ARG3_RTX. Return the result rtx on success,
3911 otherwise return null. */
3912
3913 static rtx
3914 expand_cmpstrn_or_cmpmem (insn_code icode, rtx target, rtx arg1_rtx,
3915 rtx arg2_rtx, tree arg3_type, rtx arg3_rtx,
3916 HOST_WIDE_INT align)
3917 {
3918 machine_mode insn_mode = insn_data[icode].operand[0].mode;
3919
3920 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
3921 target = NULL_RTX;
3922
3923 struct expand_operand ops[5];
3924 create_output_operand (&ops[0], target, insn_mode);
3925 create_fixed_operand (&ops[1], arg1_rtx);
3926 create_fixed_operand (&ops[2], arg2_rtx);
3927 create_convert_operand_from (&ops[3], arg3_rtx, TYPE_MODE (arg3_type),
3928 TYPE_UNSIGNED (arg3_type));
3929 create_integer_operand (&ops[4], align);
3930 if (maybe_expand_insn (icode, 5, ops))
3931 return ops[0].value;
3932 return NULL_RTX;
3933 }
3934
3935 /* Expand expression EXP, which is a call to the memcmp built-in function.
3936 Return NULL_RTX if we failed and the caller should emit a normal call,
3937 otherwise try to get the result in TARGET, if convenient. */
3938
3939 static rtx
3940 expand_builtin_memcmp (tree exp, rtx target)
3941 {
3942 if (!validate_arglist (exp,
3943 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3944 return NULL_RTX;
3945
3946 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3947 implementing memcmp because it will stop if it encounters a
3948 NUL byte in both strings at the same position. */
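/* Illustrative: memcmp ("a\0x", "a\0y", 3) must inspect the third
   byte and return nonzero, whereas a string-style compare would stop
   at the matching NULs and wrongly report equality.  */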
3949 insn_code icode = direct_optab_handler (cmpmem_optab, SImode);
3950 if (icode == CODE_FOR_nothing)
3951 return NULL_RTX;
3952
3953 tree arg1 = CALL_EXPR_ARG (exp, 0);
3954 tree arg2 = CALL_EXPR_ARG (exp, 1);
3955 tree len = CALL_EXPR_ARG (exp, 2);
3956
3957 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3958 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3959
3960 /* If we don't have POINTER_TYPE arguments (no alignment info), call the function. */
3961 if (arg1_align == 0 || arg2_align == 0)
3962 return NULL_RTX;
3963
3964 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
3965 location_t loc = EXPR_LOCATION (exp);
3966 rtx arg1_rtx = get_memory_rtx (arg1, len);
3967 rtx arg2_rtx = get_memory_rtx (arg2, len);
3968 rtx arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3969
3970 /* Set MEM_SIZE as appropriate. */
3971 if (CONST_INT_P (arg3_rtx))
3972 {
3973 set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
3974 set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
3975 }
3976
3977 rtx result = expand_cmpstrn_or_cmpmem (icode, target, arg1_rtx, arg2_rtx,
3978 TREE_TYPE (len), arg3_rtx,
3979 MIN (arg1_align, arg2_align));
3980 if (result)
3981 {
3982 /* Return the value in the proper mode for this function. */
3983 if (GET_MODE (result) == mode)
3984 return result;
3985
3986 if (target != 0)
3987 {
3988 convert_move (target, result, 0);
3989 return target;
3990 }
3991
3992 return convert_to_mode (mode, result, 0);
3993 }
3994
3995 result = target;
3996 if (! (result != 0
3997 && REG_P (result) && GET_MODE (result) == mode
3998 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3999 result = gen_reg_rtx (mode);
4000
4001 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
4002 TYPE_MODE (integer_type_node), 3,
4003 XEXP (arg1_rtx, 0), Pmode,
4004 XEXP (arg2_rtx, 0), Pmode,
4005 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4006 TYPE_UNSIGNED (sizetype)),
4007 TYPE_MODE (sizetype));
4008 return result;
4009 }
4010
4011 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4012 if we failed; the caller should emit a normal call, otherwise try to get
4013 the result in TARGET, if convenient. */
4014
4015 static rtx
4016 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4017 {
4018 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4019 return NULL_RTX;
4020
4021 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4022 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4023 if (cmpstr_icode != CODE_FOR_nothing || cmpstrn_icode != CODE_FOR_nothing)
4024 {
4025 rtx arg1_rtx, arg2_rtx;
4026 tree fndecl, fn;
4027 tree arg1 = CALL_EXPR_ARG (exp, 0);
4028 tree arg2 = CALL_EXPR_ARG (exp, 1);
4029 rtx result = NULL_RTX;
4030
4031 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4032 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4033
4034 /* If we don't have POINTER_TYPE arguments (no alignment info), call the function. */
4035 if (arg1_align == 0 || arg2_align == 0)
4036 return NULL_RTX;
4037
4038 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4039 arg1 = builtin_save_expr (arg1);
4040 arg2 = builtin_save_expr (arg2);
4041
4042 arg1_rtx = get_memory_rtx (arg1, NULL);
4043 arg2_rtx = get_memory_rtx (arg2, NULL);
4044
4045 /* Try to call cmpstrsi. */
4046 if (cmpstr_icode != CODE_FOR_nothing)
4047 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4048 MIN (arg1_align, arg2_align));
4049
4050 /* Try to determine at least one length and call cmpstrnsi. */
4051 if (!result && cmpstrn_icode != CODE_FOR_nothing)
4052 {
4053 tree len;
4054 rtx arg3_rtx;
4055
4056 tree len1 = c_strlen (arg1, 1);
4057 tree len2 = c_strlen (arg2, 1);
4058
4059 if (len1)
4060 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4061 if (len2)
4062 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4063
4064 /* If we don't have a constant length for the first, use the length
4065 of the second, if we know it. We don't require a constant for
4066 this case; some cost analysis could be done if both are available
4067 but neither is constant. For now, assume they're equally cheap,
4068 unless one has side effects. If both strings have constant lengths,
4069 use the smaller. */
4070
4071 if (!len1)
4072 len = len2;
4073 else if (!len2)
4074 len = len1;
4075 else if (TREE_SIDE_EFFECTS (len1))
4076 len = len2;
4077 else if (TREE_SIDE_EFFECTS (len2))
4078 len = len1;
4079 else if (TREE_CODE (len1) != INTEGER_CST)
4080 len = len2;
4081 else if (TREE_CODE (len2) != INTEGER_CST)
4082 len = len1;
4083 else if (tree_int_cst_lt (len1, len2))
4084 len = len1;
4085 else
4086 len = len2;
4087
4088 /* If both arguments have side effects, we cannot optimize. */
4089 if (len && !TREE_SIDE_EFFECTS (len))
4090 {
4091 arg3_rtx = expand_normal (len);
4092 result = expand_cmpstrn_or_cmpmem
4093 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4094 arg3_rtx, MIN (arg1_align, arg2_align));
4095 }
4096 }
4097
4098 if (result)
4099 {
4100 /* Return the value in the proper mode for this function. */
4101 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4102 if (GET_MODE (result) == mode)
4103 return result;
4104 if (target == 0)
4105 return convert_to_mode (mode, result, 0);
4106 convert_move (target, result, 0);
4107 return target;
4108 }
4109
4110 /* Expand the library call ourselves using a stabilized argument
4111 list to avoid re-evaluating the function's arguments twice. */
4112 fndecl = get_callee_fndecl (exp);
4113 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4114 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4115 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4116 return expand_call (fn, target, target == const0_rtx);
4117 }
4118 return NULL_RTX;
4119 }
4120
4121 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4122 NULL_RTX if we failed; the caller should emit a normal call, otherwise try to get
4123 the result in TARGET, if convenient. */
4124
4125 static rtx
4126 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4127 ATTRIBUTE_UNUSED machine_mode mode)
4128 {
4129 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4130
4131 if (!validate_arglist (exp,
4132 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4133 return NULL_RTX;
4134
4135 /* If c_strlen can determine an expression for one of the string
4136 lengths, and it doesn't have side effects, then emit cmpstrnsi
4137 using length MIN(strlen(string)+1, arg3). */
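/* Illustrative example: for strncmp (s, "ab", 37) we know
   strlen ("ab") + 1 == 3, so the comparison length becomes
   MIN (3, 37) == 3 no matter how long S is.  */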
4138 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4139 if (cmpstrn_icode != CODE_FOR_nothing)
4140 {
4141 tree len, len1, len2;
4142 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4143 rtx result;
4144 tree fndecl, fn;
4145 tree arg1 = CALL_EXPR_ARG (exp, 0);
4146 tree arg2 = CALL_EXPR_ARG (exp, 1);
4147 tree arg3 = CALL_EXPR_ARG (exp, 2);
4148
4149 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4150 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4151
4152 len1 = c_strlen (arg1, 1);
4153 len2 = c_strlen (arg2, 1);
4154
4155 if (len1)
4156 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4157 if (len2)
4158 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4159
4160 /* If we don't have a constant length for the first, use the length
4161 of the second, if we know it. We don't require a constant for
4162 this case; some cost analysis could be done if both are available
4163 but neither is constant. For now, assume they're equally cheap,
4164 unless one has side effects. If both strings have constant lengths,
4165 use the smaller. */
4166
4167 if (!len1)
4168 len = len2;
4169 else if (!len2)
4170 len = len1;
4171 else if (TREE_SIDE_EFFECTS (len1))
4172 len = len2;
4173 else if (TREE_SIDE_EFFECTS (len2))
4174 len = len1;
4175 else if (TREE_CODE (len1) != INTEGER_CST)
4176 len = len2;
4177 else if (TREE_CODE (len2) != INTEGER_CST)
4178 len = len1;
4179 else if (tree_int_cst_lt (len1, len2))
4180 len = len1;
4181 else
4182 len = len2;
4183
4184 /* If both arguments have side effects, we cannot optimize. */
4185 if (!len || TREE_SIDE_EFFECTS (len))
4186 return NULL_RTX;
4187
4188 /* The actual new length parameter is MIN(len,arg3). */
4189 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4190 fold_convert_loc (loc, TREE_TYPE (len), arg3));
4191
4192 /* If we don't have POINTER_TYPE arguments (no alignment info), call the function. */
4193 if (arg1_align == 0 || arg2_align == 0)
4194 return NULL_RTX;
4195
4196 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4197 arg1 = builtin_save_expr (arg1);
4198 arg2 = builtin_save_expr (arg2);
4199 len = builtin_save_expr (len);
4200
4201 arg1_rtx = get_memory_rtx (arg1, len);
4202 arg2_rtx = get_memory_rtx (arg2, len);
4203 arg3_rtx = expand_normal (len);
4204 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
4205 arg2_rtx, TREE_TYPE (len), arg3_rtx,
4206 MIN (arg1_align, arg2_align));
4207 if (result)
4208 {
4209 /* Return the value in the proper mode for this function. */
4210 mode = TYPE_MODE (TREE_TYPE (exp));
4211 if (GET_MODE (result) == mode)
4212 return result;
4213 if (target == 0)
4214 return convert_to_mode (mode, result, 0);
4215 convert_move (target, result, 0);
4216 return target;
4217 }
4218
4219 /* Expand the library call ourselves using a stabilized argument
4220 list to avoid re-evaluating the function's arguments twice. */
4221 fndecl = get_callee_fndecl (exp);
4222 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4223 arg1, arg2, len);
4224 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4225 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4226 return expand_call (fn, target, target == const0_rtx);
4227 }
4228 return NULL_RTX;
4229 }
4230
4231 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4232 if that's convenient. */
4233
4234 rtx
4235 expand_builtin_saveregs (void)
4236 {
4237 rtx val;
4238 rtx_insn *seq;
4239
4240 /* Don't do __builtin_saveregs more than once in a function.
4241 Save the result of the first call and reuse it. */
4242 if (saveregs_value != 0)
4243 return saveregs_value;
4244
4245 /* When this function is called, it means that registers must be
4246 saved on entry to this function. So we migrate the call to the
4247 first insn of this function. */
4248
4249 start_sequence ();
4250
4251 /* Do whatever the machine needs done in this case. */
4252 val = targetm.calls.expand_builtin_saveregs ();
4253
4254 seq = get_insns ();
4255 end_sequence ();
4256
4257 saveregs_value = val;
4258
4259 /* Put the insns after the NOTE that starts the function. If this
4260 is inside a start_sequence, make the outer-level insn chain current, so
4261 the code is placed at the start of the function. */
4262 push_topmost_sequence ();
4263 emit_insn_after (seq, entry_of_function ());
4264 pop_topmost_sequence ();
4265
4266 return val;
4267 }
4268
4269 /* Expand a call to __builtin_next_arg. */
4270
4271 static rtx
4272 expand_builtin_next_arg (void)
4273 {
4274 /* Checking arguments is already done in fold_builtin_next_arg,
4275 which must be called before this function. */
4276 return expand_binop (ptr_mode, add_optab,
4277 crtl->args.internal_arg_pointer,
4278 crtl->args.arg_offset_rtx,
4279 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4280 }
4281
4282 /* Make it easier for the backends by protecting the valist argument
4283 from multiple evaluations. */
4284
4285 static tree
4286 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4287 {
4288 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4289
4290 /* The current way of determining the type of valist is completely
4291 bogus. We should have the information on the va builtin instead. */
4292 if (!vatype)
4293 vatype = targetm.fn_abi_va_list (cfun->decl);
4294
4295 if (TREE_CODE (vatype) == ARRAY_TYPE)
4296 {
4297 if (TREE_SIDE_EFFECTS (valist))
4298 valist = save_expr (valist);
4299
4300 /* For this case, the backends will be expecting a pointer to
4301 vatype, but it's possible we've actually been given an array
4302 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4303 So fix it. */
4304 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4305 {
4306 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4307 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4308 }
4309 }
4310 else
4311 {
4312 tree pt = build_pointer_type (vatype);
4313
4314 if (! needs_lvalue)
4315 {
4316 if (! TREE_SIDE_EFFECTS (valist))
4317 return valist;
4318
4319 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4320 TREE_SIDE_EFFECTS (valist) = 1;
4321 }
4322
4323 if (TREE_SIDE_EFFECTS (valist))
4324 valist = save_expr (valist);
4325 valist = fold_build2_loc (loc, MEM_REF,
4326 vatype, valist, build_int_cst (pt, 0));
4327 }
4328
4329 return valist;
4330 }
4331
4332 /* The "standard" definition of va_list is void*. */
4333
4334 tree
4335 std_build_builtin_va_list (void)
4336 {
4337 return ptr_type_node;
4338 }
4339
4340 /* The "standard" abi va_list is va_list_type_node. */
4341
4342 tree
4343 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4344 {
4345 return va_list_type_node;
4346 }
4347
4348 /* The "standard" type of va_list is va_list_type_node. */
4349
4350 tree
4351 std_canonical_va_list_type (tree type)
4352 {
4353 tree wtype, htype;
4354
4355 if (INDIRECT_REF_P (type))
4356 type = TREE_TYPE (type);
4357 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
4358 type = TREE_TYPE (type);
4359 wtype = va_list_type_node;
4360 htype = type;
4361 /* Treat structure va_list types. */
4362 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4363 htype = TREE_TYPE (htype);
4364 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4365 {
4366 /* If va_list is an array type, the argument may have decayed
4367 to a pointer type, e.g. by being passed to another function.
4368 In that case, unwrap both types so that we can compare the
4369 underlying records. */
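/* E.g. on x86_64 (illustrative; the exact layout is target-defined)
   va_list is an array of one __va_list_tag record; an argument of
   that type decays to __va_list_tag *, so both sides are unwrapped
   down to the record before the comparison below.  */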
4370 if (TREE_CODE (htype) == ARRAY_TYPE
4371 || POINTER_TYPE_P (htype))
4372 {
4373 wtype = TREE_TYPE (wtype);
4374 htype = TREE_TYPE (htype);
4375 }
4376 }
4377 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4378 return va_list_type_node;
4379
4380 return NULL_TREE;
4381 }
4382
4383 /* The "standard" implementation of va_start: just assign `nextarg' to
4384 the variable. */
4385
4386 void
4387 std_expand_builtin_va_start (tree valist, rtx nextarg)
4388 {
4389 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4390 convert_move (va_r, nextarg, 0);
4391
4392 /* We do not have any valid bounds for the pointer, so
4393 just store zero bounds for it. */
4394 if (chkp_function_instrumented_p (current_function_decl))
4395 chkp_expand_bounds_reset_for_mem (valist,
4396 make_tree (TREE_TYPE (valist),
4397 nextarg));
4398 }
4399
4400 /* Expand EXP, a call to __builtin_va_start. */
4401
4402 static rtx
4403 expand_builtin_va_start (tree exp)
4404 {
4405 rtx nextarg;
4406 tree valist;
4407 location_t loc = EXPR_LOCATION (exp);
4408
4409 if (call_expr_nargs (exp) < 2)
4410 {
4411 error_at (loc, "too few arguments to function %<va_start%>");
4412 return const0_rtx;
4413 }
4414
4415 if (fold_builtin_next_arg (exp, true))
4416 return const0_rtx;
4417
4418 nextarg = expand_builtin_next_arg ();
4419 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4420
4421 if (targetm.expand_builtin_va_start)
4422 targetm.expand_builtin_va_start (valist, nextarg);
4423 else
4424 std_expand_builtin_va_start (valist, nextarg);
4425
4426 return const0_rtx;
4427 }
4428
4429 /* Expand EXP, a call to __builtin_va_end. */
4430
4431 static rtx
4432 expand_builtin_va_end (tree exp)
4433 {
4434 tree valist = CALL_EXPR_ARG (exp, 0);
4435
4436 /* Evaluate for side effects, if needed. I hate macros that don't
4437 do that. */
4438 if (TREE_SIDE_EFFECTS (valist))
4439 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4440
4441 return const0_rtx;
4442 }
4443
4444 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4445 builtin rather than just as an assignment in stdarg.h because of the
4446 nastiness of array-type va_list types. */
4447
4448 static rtx
4449 expand_builtin_va_copy (tree exp)
4450 {
4451 tree dst, src, t;
4452 location_t loc = EXPR_LOCATION (exp);
4453
4454 dst = CALL_EXPR_ARG (exp, 0);
4455 src = CALL_EXPR_ARG (exp, 1);
4456
4457 dst = stabilize_va_list_loc (loc, dst, 1);
4458 src = stabilize_va_list_loc (loc, src, 0);
4459
4460 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4461
4462 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4463 {
4464 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4465 TREE_SIDE_EFFECTS (t) = 1;
4466 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4467 }
4468 else
4469 {
4470 rtx dstb, srcb, size;
4471
4472 /* Evaluate to pointers. */
4473 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4474 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4475 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4476 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4477
4478 dstb = convert_memory_address (Pmode, dstb);
4479 srcb = convert_memory_address (Pmode, srcb);
4480
4481 /* "Dereference" to BLKmode memories. */
4482 dstb = gen_rtx_MEM (BLKmode, dstb);
4483 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4484 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4485 srcb = gen_rtx_MEM (BLKmode, srcb);
4486 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4487 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4488
4489 /* Copy. */
4490 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4491 }
4492
4493 return const0_rtx;
4494 }
4495
4496 /* Expand a call to one of the builtin functions __builtin_frame_address or
4497 __builtin_return_address. */
4498
4499 static rtx
4500 expand_builtin_frame_address (tree fndecl, tree exp)
4501 {
4502 /* The argument must be a nonnegative integer constant.
4503 It counts the number of frames to scan up the stack.
4504 The value is either the frame pointer value or the return
4505 address saved in that frame. */
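/* Illustrative usage: __builtin_frame_address (0) yields the current
   frame and __builtin_return_address (0) the caller's resume address;
   nonzero counts walk up the stack and draw the warning emitted
   below.  */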
4506 if (call_expr_nargs (exp) == 0)
4507 /* Warning about missing arg was already issued. */
4508 return const0_rtx;
4509 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4510 {
4511 error ("invalid argument to %qD", fndecl);
4512 return const0_rtx;
4513 }
4514 else
4515 {
4516 /* Number of frames to scan up the stack. */
4517 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
4518
4519 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
4520
4521 /* Some ports cannot access arbitrary stack frames. */
4522 if (tem == NULL)
4523 {
4524 warning (0, "unsupported argument to %qD", fndecl);
4525 return const0_rtx;
4526 }
4527
4528 if (count)
4529 {
4530 /* Warn since no effort is made to ensure that any frame
4531 beyond the current one exists or can be safely reached. */
4532 warning (OPT_Wframe_address, "calling %qD with "
4533 "a nonzero argument is unsafe", fndecl);
4534 }
4535
4536 /* For __builtin_frame_address, return what we've got. */
4537 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4538 return tem;
4539
4540 if (!REG_P (tem)
4541 && ! CONSTANT_P (tem))
4542 tem = copy_addr_to_reg (tem);
4543 return tem;
4544 }
4545 }
4546
4547 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4548 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4549 is the same as for allocate_dynamic_stack_space. */
4550
4551 static rtx
4552 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4553 {
4554 rtx op0;
4555 rtx result;
4556 bool valid_arglist;
4557 unsigned int align;
4558 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4559 == BUILT_IN_ALLOCA_WITH_ALIGN);
4560
4561 valid_arglist
4562 = (alloca_with_align
4563 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4564 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4565
4566 if (!valid_arglist)
4567 return NULL_RTX;
4568
4569 /* Compute the argument. */
4570 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4571
4572 /* Compute the alignment. */
4573 align = (alloca_with_align
4574 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4575 : BIGGEST_ALIGNMENT);
4576
4577 /* Allocate the desired space. */
4578 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4579 result = convert_memory_address (ptr_mode, result);
4580
4581 return result;
4582 }
4583
4584 /* Expand a call to a bswap builtin in EXP.
4585 Return NULL_RTX if a normal call should be emitted rather than expanding the
4586 function in-line. If convenient, the result should be placed in TARGET.
4587 SUBTARGET may be used as the target for computing one of EXP's operands. */
4588
4589 static rtx
4590 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
4591 rtx subtarget)
4592 {
4593 tree arg;
4594 rtx op0;
4595
4596 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4597 return NULL_RTX;
4598
4599 arg = CALL_EXPR_ARG (exp, 0);
4600 op0 = expand_expr (arg,
4601 subtarget && GET_MODE (subtarget) == target_mode
4602 ? subtarget : NULL_RTX,
4603 target_mode, EXPAND_NORMAL);
4604 if (GET_MODE (op0) != target_mode)
4605 op0 = convert_to_mode (target_mode, op0, 1);
4606
4607 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4608
4609 gcc_assert (target);
4610
4611 return convert_to_mode (target_mode, target, 1);
4612 }
4613
4614 /* Expand a call to a unary builtin in EXP.
4615 Return NULL_RTX if a normal call should be emitted rather than expanding the
4616 function in-line. If convenient, the result should be placed in TARGET.
4617 SUBTARGET may be used as the target for computing one of EXP's operands. */
4618
4619 static rtx
4620 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
4621 rtx subtarget, optab op_optab)
4622 {
4623 rtx op0;
4624
4625 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4626 return NULL_RTX;
4627
4628 /* Compute the argument. */
4629 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4630 (subtarget
4631 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4632 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4633 VOIDmode, EXPAND_NORMAL);
4634 /* Compute op, into TARGET if possible.
4635 Set TARGET to wherever the result comes back. */
4636 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4637 op_optab, op0, target, op_optab != clrsb_optab);
4638 gcc_assert (target);
4639
4640 return convert_to_mode (target_mode, target, 0);
4641 }
4642
4643 /* Expand a call to __builtin_expect. We just return our argument
4644 as the builtin_expect semantics should already have been executed by
4645 the tree branch prediction pass. */
4646
4647 static rtx
4648 expand_builtin_expect (tree exp, rtx target)
4649 {
4650 tree arg;
4651
4652 if (call_expr_nargs (exp) < 2)
4653 return const0_rtx;
4654 arg = CALL_EXPR_ARG (exp, 0);
4655
4656 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4657 /* When guessing was done, the hints should be already stripped away. */
4658 gcc_assert (!flag_guess_branch_prob
4659 || optimize == 0 || seen_error ());
4660 return target;
4661 }
4662
4663 /* Expand a call to __builtin_assume_aligned. We just return our first
4664 argument as the builtin_assume_aligned semantics should already have
4665 been executed by CCP. */
4666
4667 static rtx
4668 expand_builtin_assume_aligned (tree exp, rtx target)
4669 {
4670 if (call_expr_nargs (exp) < 2)
4671 return const0_rtx;
4672 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4673 EXPAND_NORMAL);
4674 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4675 && (call_expr_nargs (exp) < 3
4676 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4677 return target;
4678 }
4679
4680 void
4681 expand_builtin_trap (void)
4682 {
4683 if (targetm.have_trap ())
4684 {
4685 rtx_insn *insn = emit_insn (targetm.gen_trap ());
4686 /* For trap insns when not accumulating outgoing args force
4687 REG_ARGS_SIZE note to prevent crossjumping of calls with
4688 different args sizes. */
4689 if (!ACCUMULATE_OUTGOING_ARGS)
4690 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4691 }
4692 else
4693 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4694 emit_barrier ();
4695 }
4696
4697 /* Expand a call to __builtin_unreachable. We do nothing except emit
4698 a barrier saying that control flow will not pass here.
4699
4700 It is the responsibility of the program being compiled to ensure
4701 that control flow never reaches __builtin_unreachable. */
4702 static void
4703 expand_builtin_unreachable (void)
4704 {
4705 emit_barrier ();
4706 }
4707
4708 /* Expand EXP, a call to fabs, fabsf or fabsl.
4709 Return NULL_RTX if a normal call should be emitted rather than expanding
4710 the function inline. If convenient, the result should be placed
4711 in TARGET. SUBTARGET may be used as the target for computing
4712 the operand. */
4713
4714 static rtx
4715 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4716 {
4717 machine_mode mode;
4718 tree arg;
4719 rtx op0;
4720
4721 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4722 return NULL_RTX;
4723
4724 arg = CALL_EXPR_ARG (exp, 0);
4725 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4726 mode = TYPE_MODE (TREE_TYPE (arg));
4727 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4728 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4729 }
4730
4731 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4732 Return NULL_RTX if a normal call should be emitted rather than expanding the
4733 function inline. If convenient, the result should be placed in TARGET.
4734 SUBTARGET may be used as the target for computing the operand. */
4735
4736 static rtx
4737 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4738 {
4739 rtx op0, op1;
4740 tree arg;
4741
4742 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4743 return NULL_RTX;
4744
4745 arg = CALL_EXPR_ARG (exp, 0);
4746 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4747
4748 arg = CALL_EXPR_ARG (exp, 1);
4749 op1 = expand_normal (arg);
4750
4751 return expand_copysign (op0, op1, target);
4752 }
4753
4754 /* Expand a call to __builtin___clear_cache. */
4755
4756 static rtx
4757 expand_builtin___clear_cache (tree exp)
4758 {
4759 if (!targetm.code_for_clear_cache)
4760 {
4761 #ifdef CLEAR_INSN_CACHE
4762 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4763 does something. Just do the default expansion to a call to
4764 __clear_cache(). */
4765 return NULL_RTX;
4766 #else
4767 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4768 does nothing. There is no need to call it. Do nothing. */
4769 return const0_rtx;
4770 #endif /* CLEAR_INSN_CACHE */
4771 }
4772
4773 /* We have a "clear_cache" insn, and it will handle everything. */
4774 tree begin, end;
4775 rtx begin_rtx, end_rtx;
4776
4777 /* We must not expand to a library call. If we did, any
4778 fallback library function in libgcc that might contain a call to
4779 __builtin___clear_cache() would recurse infinitely. */
4780 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4781 {
4782 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4783 return const0_rtx;
4784 }
4785
4786 if (targetm.have_clear_cache ())
4787 {
4788 struct expand_operand ops[2];
4789
4790 begin = CALL_EXPR_ARG (exp, 0);
4791 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4792
4793 end = CALL_EXPR_ARG (exp, 1);
4794 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4795
4796 create_address_operand (&ops[0], begin_rtx);
4797 create_address_operand (&ops[1], end_rtx);
4798 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
4799 return const0_rtx;
4800 }
4801 return const0_rtx;
4802 }
4803
4804 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4805
4806 static rtx
4807 round_trampoline_addr (rtx tramp)
4808 {
4809 rtx temp, addend, mask;
4810
4811 /* If we don't need too much alignment, we'll have been guaranteed
4812 proper alignment by get_trampoline_type. */
4813 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4814 return tramp;
4815
4816 /* Round address up to desired boundary. */
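/* Equivalently (an illustrative restatement, with A the trampoline
   alignment in bytes): tramp = (tramp + (A - 1)) & -A; the two
   binops below materialize exactly this add and mask.  */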
4817 temp = gen_reg_rtx (Pmode);
4818 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4819 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4820
4821 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4822 temp, 0, OPTAB_LIB_WIDEN);
4823 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4824 temp, 0, OPTAB_LIB_WIDEN);
4825
4826 return tramp;
4827 }
4828
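/* Expand a call to __builtin_init_trampoline (ONSTACK true) or
   __builtin_init_heap_trampoline (ONSTACK false). EXP carries the
   trampoline address, the nested function and the static chain
   value.  */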
4829 static rtx
4830 expand_builtin_init_trampoline (tree exp, bool onstack)
4831 {
4832 tree t_tramp, t_func, t_chain;
4833 rtx m_tramp, r_tramp, r_chain, tmp;
4834
4835 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4836 POINTER_TYPE, VOID_TYPE))
4837 return NULL_RTX;
4838
4839 t_tramp = CALL_EXPR_ARG (exp, 0);
4840 t_func = CALL_EXPR_ARG (exp, 1);
4841 t_chain = CALL_EXPR_ARG (exp, 2);
4842
4843 r_tramp = expand_normal (t_tramp);
4844 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4845 MEM_NOTRAP_P (m_tramp) = 1;
4846
4847 /* If ONSTACK, the TRAMP argument should be the address of a field
4848 within the local function's FRAME decl. Either way, let's see if
4849 we can fill in the MEM_ATTRs for this memory. */
4850 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4851 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
4852
4853 /* The creator of a heap trampoline is responsible for making sure the
4854 address is aligned to at least STACK_BOUNDARY. Normally malloc
4855 will ensure this anyhow. */
4856 tmp = round_trampoline_addr (r_tramp);
4857 if (tmp != r_tramp)
4858 {
4859 m_tramp = change_address (m_tramp, BLKmode, tmp);
4860 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4861 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4862 }
4863
4864 /* The FUNC argument should be the address of the nested function.
4865 Extract the actual function decl to pass to the hook. */
4866 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4867 t_func = TREE_OPERAND (t_func, 0);
4868 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4869
4870 r_chain = expand_normal (t_chain);
4871
4872 /* Generate insns to initialize the trampoline. */
4873 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4874
4875 if (onstack)
4876 {
4877 trampolines_created = 1;
4878
4879 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4880 "trampoline generated for nested function %qD", t_func);
4881 }
4882
4883 return const0_rtx;
4884 }
4885
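/* Expand a call to __builtin_adjust_trampoline: round the trampoline
   address to TRAMPOLINE_ALIGNMENT and let the target adjust it
   further if it wishes.  */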
4886 static rtx
4887 expand_builtin_adjust_trampoline (tree exp)
4888 {
4889 rtx tramp;
4890
4891 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4892 return NULL_RTX;
4893
4894 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4895 tramp = round_trampoline_addr (tramp);
4896 if (targetm.calls.trampoline_adjust_address)
4897 tramp = targetm.calls.trampoline_adjust_address (tramp);
4898
4899 return tramp;
4900 }
4901
4902 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4903 function. The function first checks whether the back end provides
4904 an insn to implement signbit for the respective mode. If not, it
4905 checks whether the floating point format of the value is such that
4906 the sign bit can be extracted. If that is not the case, error out.
4907 EXP is the expression that is a call to the builtin function; if
4908 convenient, the result should be placed in TARGET. */
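/* An illustrative walk-through, assuming IEEE double in DFmode on a
   32-bit target: signbit_ro is 63, so the word holding the sign is
   selected, bitpos becomes 63 % 32 == 31, and a single AND against
   0x80000000 extracts the bit.  */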
4909 static rtx
4910 expand_builtin_signbit (tree exp, rtx target)
4911 {
4912 const struct real_format *fmt;
4913 machine_mode fmode, imode, rmode;
4914 tree arg;
4915 int word, bitpos;
4916 enum insn_code icode;
4917 rtx temp;
4918 location_t loc = EXPR_LOCATION (exp);
4919
4920 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4921 return NULL_RTX;
4922
4923 arg = CALL_EXPR_ARG (exp, 0);
4924 fmode = TYPE_MODE (TREE_TYPE (arg));
4925 rmode = TYPE_MODE (TREE_TYPE (exp));
4926 fmt = REAL_MODE_FORMAT (fmode);
4927
4928 arg = builtin_save_expr (arg);
4929
4930 /* Expand the argument yielding a RTX expression. */
4931 temp = expand_normal (arg);
4932
4933 /* Check if the back end provides an insn that handles signbit for the
4934 argument's mode. */
4935 icode = optab_handler (signbit_optab, fmode);
4936 if (icode != CODE_FOR_nothing)
4937 {
4938 rtx_insn *last = get_last_insn ();
4939 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4940 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4941 return target;
4942 delete_insns_since (last);
4943 }
4944
4945 /* For floating point formats without a sign bit, implement signbit
4946 as "ARG < 0.0". */
4947 bitpos = fmt->signbit_ro;
4948 if (bitpos < 0)
4949 {
4950 /* But we can't do this if the format supports signed zero. */
4951 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
4952
4953 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
4954 build_real (TREE_TYPE (arg), dconst0));
4955 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4956 }
4957
4958 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
4959 {
4960 imode = int_mode_for_mode (fmode);
4961 gcc_assert (imode != BLKmode);
4962 temp = gen_lowpart (imode, temp);
4963 }
4964 else
4965 {
4966 imode = word_mode;
4967 /* Handle targets with different FP word orders. */
4968 if (FLOAT_WORDS_BIG_ENDIAN)
4969 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
4970 else
4971 word = bitpos / BITS_PER_WORD;
4972 temp = operand_subword_force (temp, word, fmode);
4973 bitpos = bitpos % BITS_PER_WORD;
4974 }
4975
4976 /* Force the intermediate word_mode (or narrower) result into a
4977 register. This avoids attempting to create paradoxical SUBREGs
4978 of floating point modes below. */
4979 temp = force_reg (imode, temp);
4980
4981 /* If the bitpos is within the "result mode" lowpart, the operation
4982 can be implemented with a single bitwise AND. Otherwise, we need
4983 a right shift and an AND. */
4984
4985 if (bitpos < GET_MODE_BITSIZE (rmode))
4986 {
4987 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
4988
4989 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
4990 temp = gen_lowpart (rmode, temp);
4991 temp = expand_binop (rmode, and_optab, temp,
4992 immed_wide_int_const (mask, rmode),
4993 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4994 }
4995 else
4996 {
4997 /* Perform a logical right shift to place the signbit in the least
4998 significant bit, then truncate the result to the desired mode
4999 and mask just this bit. */
5000 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5001 temp = gen_lowpart (rmode, temp);
5002 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5003 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5004 }
5005
5006 return temp;
5007 }
5008
5009 /* Expand fork or exec calls. TARGET is the desired target of the
5010 call. EXP is the call. FN is the
5011 declaration of the actual function. IGNORE is nonzero if the
5012 value is to be ignored. */
5013
5014 static rtx
5015 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5016 {
5017 tree id, decl;
5018 tree call;
5019
5020 /* If we are not profiling, just call the function. */
5021 if (!profile_arc_flag)
5022 return NULL_RTX;
5023
5024 /* Otherwise call the wrapper. This should be equivalent for the rest of
5025 the compiler, so the code does not diverge, and the wrapper may run the
5026 code necessary for keeping the profiling sane. */
5027
5028 switch (DECL_FUNCTION_CODE (fn))
5029 {
5030 case BUILT_IN_FORK:
5031 id = get_identifier ("__gcov_fork");
5032 break;
5033
5034 case BUILT_IN_EXECL:
5035 id = get_identifier ("__gcov_execl");
5036 break;
5037
5038 case BUILT_IN_EXECV:
5039 id = get_identifier ("__gcov_execv");
5040 break;
5041
5042 case BUILT_IN_EXECLP:
5043 id = get_identifier ("__gcov_execlp");
5044 break;
5045
5046 case BUILT_IN_EXECLE:
5047 id = get_identifier ("__gcov_execle");
5048 break;
5049
5050 case BUILT_IN_EXECVP:
5051 id = get_identifier ("__gcov_execvp");
5052 break;
5053
5054 case BUILT_IN_EXECVE:
5055 id = get_identifier ("__gcov_execve");
5056 break;
5057
5058 default:
5059 gcc_unreachable ();
5060 }
5061
5062 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5063 FUNCTION_DECL, id, TREE_TYPE (fn));
5064 DECL_EXTERNAL (decl) = 1;
5065 TREE_PUBLIC (decl) = 1;
5066 DECL_ARTIFICIAL (decl) = 1;
5067 TREE_NOTHROW (decl) = 1;
5068 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5069 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5070 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5071 return expand_call (call, target, ignore);
5072 }
5073
5074
5075 \f
5076 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5077 the pointer in these functions is void*, the tree optimizers may remove
5078 casts. The mode computed in expand_builtin isn't reliable either, due
5079 to __sync_bool_compare_and_swap.
5080
5081 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5082 group of builtins. This gives us log2 of the mode size. */
5083
5084 static inline machine_mode
5085 get_builtin_sync_mode (int fcode_diff)
5086 {
5087 /* The size is not negotiable, so ask not to get BLKmode in return
5088 if the target indicates that a smaller size would be better. */
5089 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5090 }
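
/* For example (illustrative): BUILT_IN_SYNC_FETCH_AND_ADD_4 lies two
   entries after BUILT_IN_SYNC_FETCH_AND_ADD_1, so FCODE_DIFF == 2 and the
   lookup above requests a BITS_PER_UNIT << 2 == 32-bit integer mode,
   i.e. SImode on typical targets.  */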
5091
5092 /* Expand the memory expression LOC and return the appropriate memory operand
5093 for the builtin_sync operations. */
5094
5095 static rtx
5096 get_builtin_sync_mem (tree loc, machine_mode mode)
5097 {
5098 rtx addr, mem;
5099
5100 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5101 addr = convert_memory_address (Pmode, addr);
5102
5103 /* Note that we explicitly do not want any alias information for this
5104 memory, so that we kill all other live memories. Otherwise we don't
5105 satisfy the full barrier semantics of the intrinsic. */
5106 mem = validize_mem (gen_rtx_MEM (mode, addr));
5107
5108   /* The alignment needs to be at least that of the mode.  */
5109 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5110 get_pointer_alignment (loc)));
5111 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5112 MEM_VOLATILE_P (mem) = 1;
5113
5114 return mem;
5115 }
5116
5117 /* Make sure an argument is in the right mode.
5118 EXP is the tree argument.
5119 MODE is the mode it should be in. */
5120
5121 static rtx
5122 expand_expr_force_mode (tree exp, machine_mode mode)
5123 {
5124 rtx val;
5125 machine_mode old_mode;
5126
5127 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5128 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5129 of CONST_INTs, where we know the old_mode only from the call argument. */
5130
5131 old_mode = GET_MODE (val);
5132 if (old_mode == VOIDmode)
5133 old_mode = TYPE_MODE (TREE_TYPE (exp));
5134 val = convert_modes (mode, old_mode, val, 1);
5135 return val;
5136 }
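
/* Illustration of the CONST_INT case mentioned above: expanding a literal
   argument such as 5 yields (const_int 5), whose GET_MODE is VOIDmode, so
   OLD_MODE must be recovered from TREE_TYPE (exp) before convert_modes
   runs.  */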
5137
5138
5139 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5140 EXP is the CALL_EXPR. CODE is the rtx code
5141 that corresponds to the arithmetic or logical operation from the name;
5142 an exception here is that NOT actually means NAND. TARGET is an optional
5143 place for us to store the results; AFTER is true if this is the
5144 fetch_and_xxx form. */
5145
5146 static rtx
5147 expand_builtin_sync_operation (machine_mode mode, tree exp,
5148 enum rtx_code code, bool after,
5149 rtx target)
5150 {
5151 rtx val, mem;
5152 location_t loc = EXPR_LOCATION (exp);
5153
5154 if (code == NOT && warn_sync_nand)
5155 {
5156 tree fndecl = get_callee_fndecl (exp);
5157 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5158
5159 static bool warned_f_a_n, warned_n_a_f;
5160
5161 switch (fcode)
5162 {
5163 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5164 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5165 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5166 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5167 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5168 if (warned_f_a_n)
5169 break;
5170
5171 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5172 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5173 warned_f_a_n = true;
5174 break;
5175
5176 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5177 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5178 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5179 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5180 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5181 if (warned_n_a_f)
5182 break;
5183
5184 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5185 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5186 warned_n_a_f = true;
5187 break;
5188
5189 default:
5190 gcc_unreachable ();
5191 }
5192 }
5193
5194 /* Expand the operands. */
5195 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5196 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5197
5198 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
5199 after);
5200 }
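
/* User-level view of the NAND case warned about above (illustrative, per
   the GCC manual): since GCC 4.4,

     type __sync_fetch_and_nand (type *ptr, type val)

   behaves as { tmp = *ptr; *ptr = ~(tmp & val); return tmp; }, whereas
   earlier releases computed ~tmp & val.  */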
5201
5202 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5203 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5204 true if this is the boolean form. TARGET is a place for us to store the
5205 results; this is NOT optional if IS_BOOL is true. */
5206
5207 static rtx
5208 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5209 bool is_bool, rtx target)
5210 {
5211 rtx old_val, new_val, mem;
5212 rtx *pbool, *poval;
5213
5214 /* Expand the operands. */
5215 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5216 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5217 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5218
5219 pbool = poval = NULL;
5220 if (target != const0_rtx)
5221 {
5222 if (is_bool)
5223 pbool = &target;
5224 else
5225 poval = &target;
5226 }
5227 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5228 false, MEMMODEL_SYNC_SEQ_CST,
5229 MEMMODEL_SYNC_SEQ_CST))
5230 return NULL_RTX;
5231
5232 return target;
5233 }
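
/* Illustrative contrast between the two forms handled here:

     ok  = __sync_bool_compare_and_swap (&x, oldv, newv);
     old = __sync_val_compare_and_swap (&x, oldv, newv);

   The bool form returns whether the swap happened (PBOOL above); the val
   form returns the prior contents of the location (POVAL above).  */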
5234
5235 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5236 general form is actually an atomic exchange, and some targets only
5237 support a reduced form with the second argument being a constant 1.
5238 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5239 the results. */
5240
5241 static rtx
5242 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5243 rtx target)
5244 {
5245 rtx val, mem;
5246
5247 /* Expand the operands. */
5248 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5249 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5250
5251 return expand_sync_lock_test_and_set (target, mem, val);
5252 }
5253
5254 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5255
5256 static void
5257 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5258 {
5259 rtx mem;
5260
5261 /* Expand the operands. */
5262 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5263
5264 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
5265 }
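
/* Illustrative pairing of the two lock primitives expanded above -- the
   spinlock idiom from the GCC manual:

     static int lock;
     while (__sync_lock_test_and_set (&lock, 1))
       while (lock)
         ;                          -- spin; acquire barrier on success
     ... critical section ...
     __sync_lock_release (&lock);   -- release barrier, stores 0  */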
5266
5267 /* Given an integer representing an ``enum memmodel'', verify its
5268 correctness and return the memory model enum. */
5269
5270 static enum memmodel
5271 get_memmodel (tree exp)
5272 {
5273 rtx op;
5274 unsigned HOST_WIDE_INT val;
5275
5276   /* If the parameter is not a constant, it's a run-time value, so we'll just
5277 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5278 if (TREE_CODE (exp) != INTEGER_CST)
5279 return MEMMODEL_SEQ_CST;
5280
5281 op = expand_normal (exp);
5282
5283 val = INTVAL (op);
5284 if (targetm.memmodel_check)
5285 val = targetm.memmodel_check (val);
5286 else if (val & ~MEMMODEL_MASK)
5287 {
5288 warning (OPT_Winvalid_memory_model,
5289 	       "unknown architecture specifier in memory model to builtin");
5290 return MEMMODEL_SEQ_CST;
5291 }
5292
5293   /* A user should never see an explicit SYNC memory model, so >= LAST works.  */
5294 if (memmodel_base (val) >= MEMMODEL_LAST)
5295 {
5296 warning (OPT_Winvalid_memory_model,
5297 "invalid memory model argument to builtin");
5298 return MEMMODEL_SEQ_CST;
5299 }
5300
5301 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5302 be conservative and promote consume to acquire. */
5303 if (val == MEMMODEL_CONSUME)
5304 val = MEMMODEL_ACQUIRE;
5305
5306 return (enum memmodel) val;
5307 }
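
/* For reference, the constants that reach here follow the C++11 order:
   __ATOMIC_RELAXED == 0, __ATOMIC_CONSUME == 1, __ATOMIC_ACQUIRE == 2,
   __ATOMIC_RELEASE == 3, __ATOMIC_ACQ_REL == 4, __ATOMIC_SEQ_CST == 5.
   A non-constant argument, e.g. (illustrative)

     __atomic_load_n (&x, order);

   with ORDER computed at run time, is conservatively treated as
   MEMMODEL_SEQ_CST by the early return above.  */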
5308
5309 /* Expand the __atomic_exchange intrinsic:
5310 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5311 EXP is the CALL_EXPR.
5312 TARGET is an optional place for us to store the results. */
5313
5314 static rtx
5315 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5316 {
5317 rtx val, mem;
5318 enum memmodel model;
5319
5320 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5321
5322 if (!flag_inline_atomics)
5323 return NULL_RTX;
5324
5325 /* Expand the operands. */
5326 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5327 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5328
5329 return expand_atomic_exchange (target, mem, val, model);
5330 }
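
/* Illustrative caller of the expansion above:

     old = __atomic_exchange_n (&x, newval, __ATOMIC_ACQ_REL);

   which unconditionally stores NEWVAL and returns the previous value.  */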
5331
5332 /* Expand the __atomic_compare_exchange intrinsic:
5333 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5334 TYPE desired, BOOL weak,
5335 enum memmodel success,
5336 enum memmodel failure)
5337 EXP is the CALL_EXPR.
5338 TARGET is an optional place for us to store the results. */
5339
5340 static rtx
5341 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5342 rtx target)
5343 {
5344 rtx expect, desired, mem, oldval;
5345 rtx_code_label *label;
5346 enum memmodel success, failure;
5347 tree weak;
5348 bool is_weak;
5349
5350 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5351 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5352
5353 if (failure > success)
5354 {
5355 warning (OPT_Winvalid_memory_model,
5356 "failure memory model cannot be stronger than success memory "
5357 "model for %<__atomic_compare_exchange%>");
5358 success = MEMMODEL_SEQ_CST;
5359 }
5360
5361 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5362 {
5363 warning (OPT_Winvalid_memory_model,
5364 "invalid failure memory model for "
5365 "%<__atomic_compare_exchange%>");
5366 failure = MEMMODEL_SEQ_CST;
5367 success = MEMMODEL_SEQ_CST;
5368 }
5369
5371 if (!flag_inline_atomics)
5372 return NULL_RTX;
5373
5374 /* Expand the operands. */
5375 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5376
5377 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5378 expect = convert_memory_address (Pmode, expect);
5379 expect = gen_rtx_MEM (mode, expect);
5380 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5381
5382 weak = CALL_EXPR_ARG (exp, 3);
5383 is_weak = false;
5384 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5385 is_weak = true;
5386
5387 if (target == const0_rtx)
5388 target = NULL;
5389
5390   /* Lest the rtl backend create a race condition with an improper store
5391 to memory, always create a new pseudo for OLDVAL. */
5392 oldval = NULL;
5393
5394 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5395 is_weak, success, failure))
5396 return NULL_RTX;
5397
5398 /* Conditionally store back to EXPECT, lest we create a race condition
5399 with an improper store to memory. */
5400 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5401 the normal case where EXPECT is totally private, i.e. a register. At
5402 which point the store can be unconditional. */
5403 label = gen_label_rtx ();
5404 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
5405 GET_MODE (target), 1, label);
5406 emit_move_insn (expect, oldval);
5407 emit_label (label);
5408
5409 return target;
5410 }
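
/* Illustrative caller of the expansion above -- a typical CAS loop.  The
   conditional store back to EXPECT is what lets each retry use the
   freshly observed value:

     int expected = __atomic_load_n (&x, __ATOMIC_RELAXED);
     while (!__atomic_compare_exchange_n (&x, &expected, expected + 1,
                                          1, __ATOMIC_SEQ_CST,
                                          __ATOMIC_RELAXED))
       ;                              -- the 1 selects the weak variant  */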
5411
5412 /* Expand the __atomic_load intrinsic:
5413 TYPE __atomic_load (TYPE *object, enum memmodel)
5414 EXP is the CALL_EXPR.
5415 TARGET is an optional place for us to store the results. */
5416
5417 static rtx
5418 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
5419 {
5420 rtx mem;
5421 enum memmodel model;
5422
5423 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5424 if (is_mm_release (model) || is_mm_acq_rel (model))
5425 {
5426 warning (OPT_Winvalid_memory_model,
5427 "invalid memory model for %<__atomic_load%>");
5428 model = MEMMODEL_SEQ_CST;
5429 }
5430
5431 if (!flag_inline_atomics)
5432 return NULL_RTX;
5433
5434 /* Expand the operand. */
5435 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5436
5437 return expand_atomic_load (target, mem, model);
5438 }
5439
5440
5441 /* Expand the __atomic_store intrinsic:
5442 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5443 EXP is the CALL_EXPR.
5444 TARGET is an optional place for us to store the results. */
5445
5446 static rtx
5447 expand_builtin_atomic_store (machine_mode mode, tree exp)
5448 {
5449 rtx mem, val;
5450 enum memmodel model;
5451
5452 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5453 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
5454 || is_mm_release (model)))
5455 {
5456 warning (OPT_Winvalid_memory_model,
5457 "invalid memory model for %<__atomic_store%>");
5458 model = MEMMODEL_SEQ_CST;
5459 }
5460
5461 if (!flag_inline_atomics)
5462 return NULL_RTX;
5463
5464 /* Expand the operands. */
5465 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5466 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5467
5468 return expand_atomic_store (mem, val, model, false);
5469 }
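
/* Illustrative pairing of the load and store expanders above -- the usual
   release/acquire publication idiom:

     writer:  data = 42;
              __atomic_store_n (&ready, 1, __ATOMIC_RELEASE);
     reader:  if (__atomic_load_n (&ready, __ATOMIC_ACQUIRE))
                ... use data ...

   The model checks in both expanders reject the combinations C11 forbids,
   such as an acquire store or a release load.  */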
5470
5471 /* Expand the __atomic_fetch_XXX intrinsic:
5472 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5473 EXP is the CALL_EXPR.
5474 TARGET is an optional place for us to store the results.
5475    CODE is the operation: PLUS, MINUS, AND, XOR, IOR, or NOT (for NAND).
5476 FETCH_AFTER is true if returning the result of the operation.
5477 FETCH_AFTER is false if returning the value before the operation.
5478 IGNORE is true if the result is not used.
5479 EXT_CALL is the correct builtin for an external call if this cannot be
5480 resolved to an instruction sequence. */
5481
5482 static rtx
5483 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
5484 enum rtx_code code, bool fetch_after,
5485 bool ignore, enum built_in_function ext_call)
5486 {
5487 rtx val, mem, ret;
5488 enum memmodel model;
5489 tree fndecl;
5490 tree addr;
5491
5492 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5493
5494 /* Expand the operands. */
5495 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5496 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5497
5498 /* Only try generating instructions if inlining is turned on. */
5499 if (flag_inline_atomics)
5500 {
5501 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5502 if (ret)
5503 return ret;
5504 }
5505
5506 /* Return if a different routine isn't needed for the library call. */
5507 if (ext_call == BUILT_IN_NONE)
5508 return NULL_RTX;
5509
5510 /* Change the call to the specified function. */
5511 fndecl = get_callee_fndecl (exp);
5512 addr = CALL_EXPR_FN (exp);
5513 STRIP_NOPS (addr);
5514
5515 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5516 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
5517
5518 /* Expand the call here so we can emit trailing code. */
5519 ret = expand_call (exp, target, ignore);
5520
5521 /* Replace the original function just in case it matters. */
5522 TREE_OPERAND (addr, 0) = fndecl;
5523
5524 /* Then issue the arithmetic correction to return the right result. */
5525 if (!ignore)
5526 {
5527 if (code == NOT)
5528 {
5529 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5530 OPTAB_LIB_WIDEN);
5531 ret = expand_simple_unop (mode, NOT, ret, target, true);
5532 }
5533 else
5534 ret = expand_simple_binop (mode, code, ret, val, target, true,
5535 OPTAB_LIB_WIDEN);
5536 }
5537 return ret;
5538 }
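
/* The trailing correction above is plain arithmetic over the library's
   fetch-and-OP result (the old value).  Illustratively:

     __atomic_add_fetch (p, v, m)  ==  __atomic_fetch_add (p, v, m) + v
     __atomic_nand_fetch (p, v, m) ==  ~(__atomic_fetch_nand (p, v, m) & v)

   and the NAND identity is exactly the AND-then-NOT sequence emitted when
   CODE == NOT.  */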
5539
5540 /* Expand an atomic clear operation.
5541    void __atomic_clear (BOOL *obj, enum memmodel)
5542 EXP is the call expression. */
5543
5544 static rtx
5545 expand_builtin_atomic_clear (tree exp)
5546 {
5547 machine_mode mode;
5548 rtx mem, ret;
5549 enum memmodel model;
5550
5551 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5552 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5553 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5554
5555 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
5556 {
5557 warning (OPT_Winvalid_memory_model,
5558 	       "invalid memory model for %<__atomic_clear%>");
5559 model = MEMMODEL_SEQ_CST;
5560 }
5561
5562   /* Try issuing an __atomic_store, with fallback to __sync_lock_release.
5563      Failing both, expand_atomic_store returns NULL and a plain store is
5564      emitted below; that can only happen if the bool type is larger than a
5565      word.  Unlikely, but handle it for completeness: assume a single
5566      threaded model, since with no atomic support no barriers are required.  */
5567 ret = expand_atomic_store (mem, const0_rtx, model, true);
5568 if (!ret)
5569 emit_move_insn (mem, const0_rtx);
5570 return const0_rtx;
5571 }
5572
5573 /* Expand an atomic test_and_set operation.
5574    bool __atomic_test_and_set (BOOL *obj, enum memmodel)
5575 EXP is the call expression. */
5576
5577 static rtx
5578 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5579 {
5580 rtx mem;
5581 enum memmodel model;
5582 machine_mode mode;
5583
5584 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5585 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5586 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5587
5588 return expand_atomic_test_and_set (target, mem, model);
5589 }
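
/* Illustrative use of the two byte-sized primitives expanded above -- a
   minimal spinlock:

     static char busy;
     while (__atomic_test_and_set (&busy, __ATOMIC_ACQUIRE))
       ;                            -- spin until we are the setter
     ... critical section ...
     __atomic_clear (&busy, __ATOMIC_RELEASE);  */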
5590
5591
5592 /* Return true if an object of size ARG0 (optionally at address ARG1) is
5593    always lock free on this architecture; NULL ARG1 means typical alignment.  */
5594
5595 static tree
5596 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5597 {
5598 int size;
5599 machine_mode mode;
5600 unsigned int mode_align, type_align;
5601
5602 if (TREE_CODE (arg0) != INTEGER_CST)
5603 return NULL_TREE;
5604
5605 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5606 mode = mode_for_size (size, MODE_INT, 0);
5607 mode_align = GET_MODE_ALIGNMENT (mode);
5608
5609 if (TREE_CODE (arg1) == INTEGER_CST)
5610 {
5611 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
5612
5613 /* Either this argument is null, or it's a fake pointer encoding
5614 the alignment of the object. */
5615 val = val & -val;
5616 val *= BITS_PER_UNIT;
5617
5618 if (val == 0 || mode_align < val)
5619 type_align = mode_align;
5620 else
5621 type_align = val;
5622 }
5623 else
5624 {
5625 tree ttype = TREE_TYPE (arg1);
5626
5627 /* This function is usually invoked and folded immediately by the front
5628 end before anything else has a chance to look at it. The pointer
5629 parameter at this point is usually cast to a void *, so check for that
5630 and look past the cast. */
5631 if (CONVERT_EXPR_P (arg1) && POINTER_TYPE_P (ttype)
5632 && VOID_TYPE_P (TREE_TYPE (ttype)))
5633 arg1 = TREE_OPERAND (arg1, 0);
5634
5635 ttype = TREE_TYPE (arg1);
5636 gcc_assert (POINTER_TYPE_P (ttype));
5637
5638 /* Get the underlying type of the object. */
5639 ttype = TREE_TYPE (ttype);
5640 type_align = TYPE_ALIGN (ttype);
5641 }
5642
5643 /* If the object has smaller alignment, the lock free routines cannot
5644 be used. */
5645 if (type_align < mode_align)
5646 return boolean_false_node;
5647
5648 /* Check if a compare_and_swap pattern exists for the mode which represents
5649 the required size. The pattern is not allowed to fail, so the existence
5650 of the pattern indicates support is present. */
5651 if (can_compare_and_swap_p (mode, true))
5652 return boolean_true_node;
5653 else
5654 return boolean_false_node;
5655 }
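
/* Two illustrative folds of the logic above, assuming a target with a
   32-bit compare-and-swap pattern and 32-bit alignment for SImode:

     __atomic_always_lock_free (4, 0)           -> boolean_true_node
     __atomic_always_lock_free (4, (void *) 2)  -> boolean_false_node

   In the second call the fake pointer encodes a 2-byte alignment
   (2 & -2 == 2, times BITS_PER_UNIT gives 16 < 32), so the object may be
   under-aligned for the lock-free routines.  */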
5656
5657 /* Return true if the parameters to call EXP represent an object which will
5658 always generate lock free instructions. The first argument represents the
5659 size of the object, and the second parameter is a pointer to the object
5660 itself. If NULL is passed for the object, then the result is based on
5661 typical alignment for an object of the specified size. Otherwise return
5662 false. */
5663
5664 static rtx
5665 expand_builtin_atomic_always_lock_free (tree exp)
5666 {
5667 tree size;
5668 tree arg0 = CALL_EXPR_ARG (exp, 0);
5669 tree arg1 = CALL_EXPR_ARG (exp, 1);
5670
5671 if (TREE_CODE (arg0) != INTEGER_CST)
5672 {
5673 error ("non-constant argument 1 to __atomic_always_lock_free");
5674 return const0_rtx;
5675 }
5676
5677 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5678 if (size == boolean_true_node)
5679 return const1_rtx;
5680 return const0_rtx;
5681 }
5682
5683 /* Return boolean_true_node if it can be determined that object ARG1 of
5684    size ARG0 is lock free on this architecture, otherwise NULL_TREE.  */
5685
5686 static tree
5687 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5688 {
5689 if (!flag_inline_atomics)
5690 return NULL_TREE;
5691
5692 /* If it isn't always lock free, don't generate a result. */
5693 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5694 return boolean_true_node;
5695
5696 return NULL_TREE;
5697 }
5698
5699 /* Return const1_rtx if the parameters to call EXP represent an object
5700    which is known to be lock free; otherwise return NULL_RTX.  The first
5701    argument is the size of the object, and the second is a pointer to the
5702    object itself.  If NULL is passed for the object, the result is based
5703    on typical alignment for an object of the specified size.  */
5705
5706 static rtx
5707 expand_builtin_atomic_is_lock_free (tree exp)
5708 {
5709 tree size;
5710 tree arg0 = CALL_EXPR_ARG (exp, 0);
5711 tree arg1 = CALL_EXPR_ARG (exp, 1);
5712
5713 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5714 {
5715 error ("non-integer argument 1 to __atomic_is_lock_free");
5716 return NULL_RTX;
5717 }
5718
5719 if (!flag_inline_atomics)
5720 return NULL_RTX;
5721
5722 /* If the value is known at compile time, return the RTX for it. */
5723 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5724 if (size == boolean_true_node)
5725 return const1_rtx;
5726
5727 return NULL_RTX;
5728 }
5729
5730 /* Expand the __atomic_thread_fence intrinsic:
5731 void __atomic_thread_fence (enum memmodel)
5732 EXP is the CALL_EXPR. */
5733
5734 static void
5735 expand_builtin_atomic_thread_fence (tree exp)
5736 {
5737 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5738 expand_mem_thread_fence (model);
5739 }
5740
5741 /* Expand the __atomic_signal_fence intrinsic:
5742 void __atomic_signal_fence (enum memmodel)
5743 EXP is the CALL_EXPR. */
5744
5745 static void
5746 expand_builtin_atomic_signal_fence (tree exp)
5747 {
5748 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5749 expand_mem_signal_fence (model);
5750 }
5751
5752 /* Expand the __sync_synchronize intrinsic. */
5753
5754 static void
5755 expand_builtin_sync_synchronize (void)
5756 {
5757 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
5758 }
5759
5760 static rtx
5761 expand_builtin_thread_pointer (tree exp, rtx target)
5762 {
5763 enum insn_code icode;
5764 if (!validate_arglist (exp, VOID_TYPE))
5765 return const0_rtx;
5766 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5767 if (icode != CODE_FOR_nothing)
5768 {
5769 struct expand_operand op;
5770       /* If the target is not suitable then create a new target.  */
5771 if (target == NULL_RTX
5772 || !REG_P (target)
5773 || GET_MODE (target) != Pmode)
5774 target = gen_reg_rtx (Pmode);
5775 create_output_operand (&op, target, Pmode);
5776 expand_insn (icode, 1, &op);
5777 return target;
5778 }
5779   error ("%<__builtin_thread_pointer%> is not supported on this target");
5780 return const0_rtx;
5781 }
5782
5783 static void
5784 expand_builtin_set_thread_pointer (tree exp)
5785 {
5786 enum insn_code icode;
5787 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5788 return;
5789 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5790 if (icode != CODE_FOR_nothing)
5791 {
5792 struct expand_operand op;
5793 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5794 Pmode, EXPAND_NORMAL);
5795 create_input_operand (&op, val, Pmode);
5796 expand_insn (icode, 1, &op);
5797 return;
5798 }
5799   error ("%<__builtin_set_thread_pointer%> is not supported on this target");
5800 }
5801
5802 \f
5803 /* Emit code to restore the current value of stack. */
5804
5805 static void
5806 expand_stack_restore (tree var)
5807 {
5808 rtx_insn *prev;
5809 rtx sa = expand_normal (var);
5810
5811 sa = convert_memory_address (Pmode, sa);
5812
5813 prev = get_last_insn ();
5814 emit_stack_restore (SAVE_BLOCK, sa);
5815
5816 record_new_stack_level ();
5817
5818 fixup_args_size_notes (prev, get_last_insn (), 0);
5819 }
5820
5821 /* Emit code to save the current value of stack. */
5822
5823 static rtx
5824 expand_stack_save (void)
5825 {
5826 rtx ret = NULL_RTX;
5827
5828 emit_stack_save (SAVE_BLOCK, &ret);
5829 return ret;
5830 }
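
/* These two expanders back __builtin_stack_save and
   __builtin_stack_restore, which the gimplifier wraps around blocks
   containing variable-sized objects.  Illustrative source:

     {
       char buf[n];          -- VLA; a stack save precedes the allocation
       ... use buf ...
     }                       -- a stack restore reclaims the space here  */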
5831
5832
5833 /* Expand an expression EXP that calls a built-in function,
5834 with result going to TARGET if that's convenient
5835 (and in mode MODE if that's convenient).
5836 SUBTARGET may be used as the target for computing one of EXP's operands.
5837 IGNORE is nonzero if the value is to be ignored. */
5838
5839 rtx
5840 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
5841 int ignore)
5842 {
5843 tree fndecl = get_callee_fndecl (exp);
5844 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5845 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5846 int flags;
5847
5848 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5849 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5850
5851 /* When ASan is enabled, we don't want to expand some memory/string
5852 builtins and rely on libsanitizer's hooks. This allows us to avoid
5853      redundant checks and be sure that possible overflow will be detected
5854 by ASan. */
5855
5856 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
5857 return expand_call (exp, target, ignore);
5858
5859 /* When not optimizing, generate calls to library functions for a certain
5860 set of builtins. */
5861 if (!optimize
5862 && !called_as_built_in (fndecl)
5863 && fcode != BUILT_IN_FORK
5864 && fcode != BUILT_IN_EXECL
5865 && fcode != BUILT_IN_EXECV
5866 && fcode != BUILT_IN_EXECLP
5867 && fcode != BUILT_IN_EXECLE
5868 && fcode != BUILT_IN_EXECVP
5869 && fcode != BUILT_IN_EXECVE
5870 && fcode != BUILT_IN_ALLOCA
5871 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5872 && fcode != BUILT_IN_FREE
5873 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
5874 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
5875 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
5876 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
5877 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
5878 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
5879 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
5880 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
5881 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
5882 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
5883 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
5884 && fcode != BUILT_IN_CHKP_BNDRET)
5885 return expand_call (exp, target, ignore);
5886
5887 /* The built-in function expanders test for target == const0_rtx
5888 to determine whether the function's result will be ignored. */
5889 if (ignore)
5890 target = const0_rtx;
5891
5892 /* If the result of a pure or const built-in function is ignored, and
5893 none of its arguments are volatile, we can avoid expanding the
5894 built-in call and just evaluate the arguments for side-effects. */
5895 if (target == const0_rtx
5896 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5897 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5898 {
5899 bool volatilep = false;
5900 tree arg;
5901 call_expr_arg_iterator iter;
5902
5903 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5904 if (TREE_THIS_VOLATILE (arg))
5905 {
5906 volatilep = true;
5907 break;
5908 }
5909
5910 if (! volatilep)
5911 {
5912 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5913 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5914 return const0_rtx;
5915 }
5916 }
5917
5918 /* expand_builtin_with_bounds is supposed to be used for
5919 instrumented builtin calls. */
5920 gcc_assert (!CALL_WITH_BOUNDS_P (exp));
5921
5922 switch (fcode)
5923 {
5924 CASE_FLT_FN (BUILT_IN_FABS):
5925 case BUILT_IN_FABSD32:
5926 case BUILT_IN_FABSD64:
5927 case BUILT_IN_FABSD128:
5928 target = expand_builtin_fabs (exp, target, subtarget);
5929 if (target)
5930 return target;
5931 break;
5932
5933 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5934 target = expand_builtin_copysign (exp, target, subtarget);
5935 if (target)
5936 return target;
5937 break;
5938
5939 /* Just do a normal library call if we were unable to fold
5940 the values. */
5941 CASE_FLT_FN (BUILT_IN_CABS):
5942 break;
5943
5944 CASE_FLT_FN (BUILT_IN_EXP):
5945 CASE_FLT_FN (BUILT_IN_EXP10):
5946 CASE_FLT_FN (BUILT_IN_POW10):
5947 CASE_FLT_FN (BUILT_IN_EXP2):
5948 CASE_FLT_FN (BUILT_IN_EXPM1):
5949 CASE_FLT_FN (BUILT_IN_LOGB):
5950 CASE_FLT_FN (BUILT_IN_LOG):
5951 CASE_FLT_FN (BUILT_IN_LOG10):
5952 CASE_FLT_FN (BUILT_IN_LOG2):
5953 CASE_FLT_FN (BUILT_IN_LOG1P):
5954 CASE_FLT_FN (BUILT_IN_TAN):
5955 CASE_FLT_FN (BUILT_IN_ASIN):
5956 CASE_FLT_FN (BUILT_IN_ACOS):
5957 CASE_FLT_FN (BUILT_IN_ATAN):
5958 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5959 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5960 because of possible accuracy problems. */
5961 if (! flag_unsafe_math_optimizations)
5962 break;
5963 CASE_FLT_FN (BUILT_IN_SQRT):
5964 CASE_FLT_FN (BUILT_IN_FLOOR):
5965 CASE_FLT_FN (BUILT_IN_CEIL):
5966 CASE_FLT_FN (BUILT_IN_TRUNC):
5967 CASE_FLT_FN (BUILT_IN_ROUND):
5968 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5969 CASE_FLT_FN (BUILT_IN_RINT):
5970 target = expand_builtin_mathfn (exp, target, subtarget);
5971 if (target)
5972 return target;
5973 break;
5974
5975 CASE_FLT_FN (BUILT_IN_FMA):
5976 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
5977 if (target)
5978 return target;
5979 break;
5980
5981 CASE_FLT_FN (BUILT_IN_ILOGB):
5982 if (! flag_unsafe_math_optimizations)
5983 break;
5984 CASE_FLT_FN (BUILT_IN_ISINF):
5985 CASE_FLT_FN (BUILT_IN_FINITE):
5986 case BUILT_IN_ISFINITE:
5987 case BUILT_IN_ISNORMAL:
5988 target = expand_builtin_interclass_mathfn (exp, target);
5989 if (target)
5990 return target;
5991 break;
5992
5993 CASE_FLT_FN (BUILT_IN_ICEIL):
5994 CASE_FLT_FN (BUILT_IN_LCEIL):
5995 CASE_FLT_FN (BUILT_IN_LLCEIL):
5996 CASE_FLT_FN (BUILT_IN_LFLOOR):
5997 CASE_FLT_FN (BUILT_IN_IFLOOR):
5998 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5999 target = expand_builtin_int_roundingfn (exp, target);
6000 if (target)
6001 return target;
6002 break;
6003
6004 CASE_FLT_FN (BUILT_IN_IRINT):
6005 CASE_FLT_FN (BUILT_IN_LRINT):
6006 CASE_FLT_FN (BUILT_IN_LLRINT):
6007 CASE_FLT_FN (BUILT_IN_IROUND):
6008 CASE_FLT_FN (BUILT_IN_LROUND):
6009 CASE_FLT_FN (BUILT_IN_LLROUND):
6010 target = expand_builtin_int_roundingfn_2 (exp, target);
6011 if (target)
6012 return target;
6013 break;
6014
6015 CASE_FLT_FN (BUILT_IN_POWI):
6016 target = expand_builtin_powi (exp, target);
6017 if (target)
6018 return target;
6019 break;
6020
6021 CASE_FLT_FN (BUILT_IN_ATAN2):
6022 CASE_FLT_FN (BUILT_IN_LDEXP):
6023 CASE_FLT_FN (BUILT_IN_SCALB):
6024 CASE_FLT_FN (BUILT_IN_SCALBN):
6025 CASE_FLT_FN (BUILT_IN_SCALBLN):
6026 if (! flag_unsafe_math_optimizations)
6027 break;
6028
6029 CASE_FLT_FN (BUILT_IN_FMOD):
6030 CASE_FLT_FN (BUILT_IN_REMAINDER):
6031 CASE_FLT_FN (BUILT_IN_DREM):
6032 CASE_FLT_FN (BUILT_IN_POW):
6033 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6034 if (target)
6035 return target;
6036 break;
6037
6038 CASE_FLT_FN (BUILT_IN_CEXPI):
6039 target = expand_builtin_cexpi (exp, target);
6040 gcc_assert (target);
6041 return target;
6042
6043 CASE_FLT_FN (BUILT_IN_SIN):
6044 CASE_FLT_FN (BUILT_IN_COS):
6045 if (! flag_unsafe_math_optimizations)
6046 break;
6047 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6048 if (target)
6049 return target;
6050 break;
6051
6052 CASE_FLT_FN (BUILT_IN_SINCOS):
6053 if (! flag_unsafe_math_optimizations)
6054 break;
6055 target = expand_builtin_sincos (exp);
6056 if (target)
6057 return target;
6058 break;
6059
6060 case BUILT_IN_APPLY_ARGS:
6061 return expand_builtin_apply_args ();
6062
6063 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6064 FUNCTION with a copy of the parameters described by
6065 ARGUMENTS, and ARGSIZE. It returns a block of memory
6066 allocated on the stack into which is stored all the registers
6067 that might possibly be used for returning the result of a
6068 function. ARGUMENTS is the value returned by
6069 __builtin_apply_args. ARGSIZE is the number of bytes of
6070 arguments that must be copied. ??? How should this value be
6071 computed? We'll also need a safe worst case value for varargs
6072 functions. */
6073 case BUILT_IN_APPLY:
6074 if (!validate_arglist (exp, POINTER_TYPE,
6075 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6076 && !validate_arglist (exp, REFERENCE_TYPE,
6077 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6078 return const0_rtx;
6079 else
6080 {
6081 rtx ops[3];
6082
6083 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6084 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6085 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6086
6087 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6088 }
6089
6090 /* __builtin_return (RESULT) causes the function to return the
6091      value described by RESULT.  RESULT is the address of the block of
6092 memory returned by __builtin_apply. */
6093 case BUILT_IN_RETURN:
6094 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6095 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6096 return const0_rtx;
6097
6098 case BUILT_IN_SAVEREGS:
6099 return expand_builtin_saveregs ();
6100
6101 case BUILT_IN_VA_ARG_PACK:
6102 /* All valid uses of __builtin_va_arg_pack () are removed during
6103 inlining. */
6104 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6105 return const0_rtx;
6106
6107 case BUILT_IN_VA_ARG_PACK_LEN:
6108 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6109 inlining. */
6110 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6111 return const0_rtx;
6112
6113 /* Return the address of the first anonymous stack arg. */
6114 case BUILT_IN_NEXT_ARG:
6115 if (fold_builtin_next_arg (exp, false))
6116 return const0_rtx;
6117 return expand_builtin_next_arg ();
6118
6119 case BUILT_IN_CLEAR_CACHE:
6120 target = expand_builtin___clear_cache (exp);
6121 if (target)
6122 return target;
6123 break;
6124
6125 case BUILT_IN_CLASSIFY_TYPE:
6126 return expand_builtin_classify_type (exp);
6127
6128 case BUILT_IN_CONSTANT_P:
6129 return const0_rtx;
6130
6131 case BUILT_IN_FRAME_ADDRESS:
6132 case BUILT_IN_RETURN_ADDRESS:
6133 return expand_builtin_frame_address (fndecl, exp);
6134
6135 /* Returns the address of the area where the structure is returned.
6136 0 otherwise. */
6137 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6138 if (call_expr_nargs (exp) != 0
6139 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6140 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6141 return const0_rtx;
6142 else
6143 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6144
6145 case BUILT_IN_ALLOCA:
6146 case BUILT_IN_ALLOCA_WITH_ALIGN:
6147 /* If the allocation stems from the declaration of a variable-sized
6148 object, it cannot accumulate. */
6149 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6150 if (target)
6151 return target;
6152 break;
6153
6154 case BUILT_IN_STACK_SAVE:
6155 return expand_stack_save ();
6156
6157 case BUILT_IN_STACK_RESTORE:
6158 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6159 return const0_rtx;
6160
6161 case BUILT_IN_BSWAP16:
6162 case BUILT_IN_BSWAP32:
6163 case BUILT_IN_BSWAP64:
6164 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6165 if (target)
6166 return target;
6167 break;
6168
6169 CASE_INT_FN (BUILT_IN_FFS):
6170 target = expand_builtin_unop (target_mode, exp, target,
6171 subtarget, ffs_optab);
6172 if (target)
6173 return target;
6174 break;
6175
6176 CASE_INT_FN (BUILT_IN_CLZ):
6177 target = expand_builtin_unop (target_mode, exp, target,
6178 subtarget, clz_optab);
6179 if (target)
6180 return target;
6181 break;
6182
6183 CASE_INT_FN (BUILT_IN_CTZ):
6184 target = expand_builtin_unop (target_mode, exp, target,
6185 subtarget, ctz_optab);
6186 if (target)
6187 return target;
6188 break;
6189
6190 CASE_INT_FN (BUILT_IN_CLRSB):
6191 target = expand_builtin_unop (target_mode, exp, target,
6192 subtarget, clrsb_optab);
6193 if (target)
6194 return target;
6195 break;
6196
6197 CASE_INT_FN (BUILT_IN_POPCOUNT):
6198 target = expand_builtin_unop (target_mode, exp, target,
6199 subtarget, popcount_optab);
6200 if (target)
6201 return target;
6202 break;
6203
6204 CASE_INT_FN (BUILT_IN_PARITY):
6205 target = expand_builtin_unop (target_mode, exp, target,
6206 subtarget, parity_optab);
6207 if (target)
6208 return target;
6209 break;
6210
6211 case BUILT_IN_STRLEN:
6212 target = expand_builtin_strlen (exp, target, target_mode);
6213 if (target)
6214 return target;
6215 break;
6216
6217 case BUILT_IN_STRCPY:
6218 target = expand_builtin_strcpy (exp, target);
6219 if (target)
6220 return target;
6221 break;
6222
6223 case BUILT_IN_STRNCPY:
6224 target = expand_builtin_strncpy (exp, target);
6225 if (target)
6226 return target;
6227 break;
6228
6229 case BUILT_IN_STPCPY:
6230 target = expand_builtin_stpcpy (exp, target, mode);
6231 if (target)
6232 return target;
6233 break;
6234
6235 case BUILT_IN_MEMCPY:
6236 target = expand_builtin_memcpy (exp, target);
6237 if (target)
6238 return target;
6239 break;
6240
6241 case BUILT_IN_MEMPCPY:
6242 target = expand_builtin_mempcpy (exp, target, mode);
6243 if (target)
6244 return target;
6245 break;
6246
6247 case BUILT_IN_MEMSET:
6248 target = expand_builtin_memset (exp, target, mode);
6249 if (target)
6250 return target;
6251 break;
6252
6253 case BUILT_IN_BZERO:
6254 target = expand_builtin_bzero (exp);
6255 if (target)
6256 return target;
6257 break;
6258
6259 case BUILT_IN_STRCMP:
6260 target = expand_builtin_strcmp (exp, target);
6261 if (target)
6262 return target;
6263 break;
6264
6265 case BUILT_IN_STRNCMP:
6266 target = expand_builtin_strncmp (exp, target, mode);
6267 if (target)
6268 return target;
6269 break;
6270
6271 case BUILT_IN_BCMP:
6272 case BUILT_IN_MEMCMP:
6273 target = expand_builtin_memcmp (exp, target);
6274 if (target)
6275 return target;
6276 break;
6277
6278 case BUILT_IN_SETJMP:
6279 /* This should have been lowered to the builtins below. */
6280 gcc_unreachable ();
6281
6282 case BUILT_IN_SETJMP_SETUP:
6283 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6284 and the receiver label. */
6285 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6286 {
6287 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6288 VOIDmode, EXPAND_NORMAL);
6289 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6290 rtx_insn *label_r = label_rtx (label);
6291
6292 /* This is copied from the handling of non-local gotos. */
6293 expand_builtin_setjmp_setup (buf_addr, label_r);
6294 nonlocal_goto_handler_labels
6295 = gen_rtx_INSN_LIST (VOIDmode, label_r,
6296 nonlocal_goto_handler_labels);
6297 /* ??? Do not let expand_label treat us as such since we would
6298 not want to be both on the list of non-local labels and on
6299 the list of forced labels. */
6300 FORCED_LABEL (label) = 0;
6301 return const0_rtx;
6302 }
6303 break;
6304
6305 case BUILT_IN_SETJMP_RECEIVER:
6306 /* __builtin_setjmp_receiver is passed the receiver label. */
6307 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6308 {
6309 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6310 rtx_insn *label_r = label_rtx (label);
6311
6312 expand_builtin_setjmp_receiver (label_r);
6313 return const0_rtx;
6314 }
6315 break;
6316
6317 /* __builtin_longjmp is passed a pointer to an array of five words.
6318 It's similar to the C library longjmp function but works with
6319 __builtin_setjmp above. */
6320 case BUILT_IN_LONGJMP:
6321 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6322 {
6323 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6324 VOIDmode, EXPAND_NORMAL);
6325 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6326
6327 if (value != const1_rtx)
6328 {
6329 error ("%<__builtin_longjmp%> second argument must be 1");
6330 return const0_rtx;
6331 }
6332
6333 expand_builtin_longjmp (buf_addr, value);
6334 return const0_rtx;
6335 }
6336 break;
6337
6338 case BUILT_IN_NONLOCAL_GOTO:
6339 target = expand_builtin_nonlocal_goto (exp);
6340 if (target)
6341 return target;
6342 break;
6343
6344 /* This updates the setjmp buffer that is its argument with the value
6345 of the current stack pointer. */
6346 case BUILT_IN_UPDATE_SETJMP_BUF:
6347 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6348 {
6349 rtx buf_addr
6350 = expand_normal (CALL_EXPR_ARG (exp, 0));
6351
6352 expand_builtin_update_setjmp_buf (buf_addr);
6353 return const0_rtx;
6354 }
6355 break;
6356
6357 case BUILT_IN_TRAP:
6358 expand_builtin_trap ();
6359 return const0_rtx;
6360
6361 case BUILT_IN_UNREACHABLE:
6362 expand_builtin_unreachable ();
6363 return const0_rtx;
6364
6365 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6366 case BUILT_IN_SIGNBITD32:
6367 case BUILT_IN_SIGNBITD64:
6368 case BUILT_IN_SIGNBITD128:
6369 target = expand_builtin_signbit (exp, target);
6370 if (target)
6371 return target;
6372 break;
6373
6374 /* Various hooks for the DWARF 2 __throw routine. */
6375 case BUILT_IN_UNWIND_INIT:
6376 expand_builtin_unwind_init ();
6377 return const0_rtx;
6378 case BUILT_IN_DWARF_CFA:
6379 return virtual_cfa_rtx;
6380 #ifdef DWARF2_UNWIND_INFO
6381 case BUILT_IN_DWARF_SP_COLUMN:
6382 return expand_builtin_dwarf_sp_column ();
6383 case BUILT_IN_INIT_DWARF_REG_SIZES:
6384 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6385 return const0_rtx;
6386 #endif
6387 case BUILT_IN_FROB_RETURN_ADDR:
6388 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6389 case BUILT_IN_EXTRACT_RETURN_ADDR:
6390 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6391 case BUILT_IN_EH_RETURN:
6392 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6393 CALL_EXPR_ARG (exp, 1));
6394 return const0_rtx;
6395 case BUILT_IN_EH_RETURN_DATA_REGNO:
6396 return expand_builtin_eh_return_data_regno (exp);
6397 case BUILT_IN_EXTEND_POINTER:
6398 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6399 case BUILT_IN_EH_POINTER:
6400 return expand_builtin_eh_pointer (exp);
6401 case BUILT_IN_EH_FILTER:
6402 return expand_builtin_eh_filter (exp);
6403 case BUILT_IN_EH_COPY_VALUES:
6404 return expand_builtin_eh_copy_values (exp);
6405
6406 case BUILT_IN_VA_START:
6407 return expand_builtin_va_start (exp);
6408 case BUILT_IN_VA_END:
6409 return expand_builtin_va_end (exp);
6410 case BUILT_IN_VA_COPY:
6411 return expand_builtin_va_copy (exp);
6412 case BUILT_IN_EXPECT:
6413 return expand_builtin_expect (exp, target);
6414 case BUILT_IN_ASSUME_ALIGNED:
6415 return expand_builtin_assume_aligned (exp, target);
6416 case BUILT_IN_PREFETCH:
6417 expand_builtin_prefetch (exp);
6418 return const0_rtx;
6419
6420 case BUILT_IN_INIT_TRAMPOLINE:
6421 return expand_builtin_init_trampoline (exp, true);
6422 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6423 return expand_builtin_init_trampoline (exp, false);
6424 case BUILT_IN_ADJUST_TRAMPOLINE:
6425 return expand_builtin_adjust_trampoline (exp);
6426
6427 case BUILT_IN_FORK:
6428 case BUILT_IN_EXECL:
6429 case BUILT_IN_EXECV:
6430 case BUILT_IN_EXECLP:
6431 case BUILT_IN_EXECLE:
6432 case BUILT_IN_EXECVP:
6433 case BUILT_IN_EXECVE:
6434 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6435 if (target)
6436 return target;
6437 break;
6438
6439 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6440 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6441 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6442 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6443 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6444 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6445 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6446 if (target)
6447 return target;
6448 break;
6449
6450 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6451 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6452 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6453 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6454 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6455 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6456 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6457 if (target)
6458 return target;
6459 break;
6460
6461 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6462 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6463 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6464 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6465 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6466 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6467 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6468 if (target)
6469 return target;
6470 break;
6471
6472 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6473 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6474 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6475 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6476 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6477 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6478 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6479 if (target)
6480 return target;
6481 break;
6482
6483 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6484 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6485 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6486 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6487 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6488 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6489 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6490 if (target)
6491 return target;
6492 break;
6493
6494 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6495 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6496 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6497 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6498 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6499 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6500 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6501 if (target)
6502 return target;
6503 break;
6504
6505 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6506 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6507 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6508 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6509 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6510 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6511 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6512 if (target)
6513 return target;
6514 break;
6515
6516 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6517 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6518 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6519 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6520 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6521 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6522 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6523 if (target)
6524 return target;
6525 break;
6526
6527 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6528 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6529 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6530 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6531 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6532 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6533 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6534 if (target)
6535 return target;
6536 break;
6537
6538 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6539 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6540 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6541 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6542 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6543 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6544 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6545 if (target)
6546 return target;
6547 break;
6548
6549 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6550 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6551 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6552 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6553 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6554 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6555 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6556 if (target)
6557 return target;
6558 break;
6559
6560 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6561 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6562 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6563 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6564 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6565 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6566 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6567 if (target)
6568 return target;
6569 break;
6570
6571 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6572 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6573 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6574 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6575 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6576 if (mode == VOIDmode)
6577 mode = TYPE_MODE (boolean_type_node);
6578 if (!target || !register_operand (target, mode))
6579 target = gen_reg_rtx (mode);
6580
6581 mode = get_builtin_sync_mode
6582 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6583 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6584 if (target)
6585 return target;
6586 break;
6587
6588 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6589 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6590 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6591 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6592 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6593 mode = get_builtin_sync_mode
6594 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6595 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6596 if (target)
6597 return target;
6598 break;
6599
6600 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6601 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6602 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6603 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6604 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6605 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6606 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6607 if (target)
6608 return target;
6609 break;
6610
6611 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6612 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6613 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6614 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6615 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6616 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6617 expand_builtin_sync_lock_release (mode, exp);
6618 return const0_rtx;
6619
6620 case BUILT_IN_SYNC_SYNCHRONIZE:
6621 expand_builtin_sync_synchronize ();
6622 return const0_rtx;
6623
6624 case BUILT_IN_ATOMIC_EXCHANGE_1:
6625 case BUILT_IN_ATOMIC_EXCHANGE_2:
6626 case BUILT_IN_ATOMIC_EXCHANGE_4:
6627 case BUILT_IN_ATOMIC_EXCHANGE_8:
6628 case BUILT_IN_ATOMIC_EXCHANGE_16:
6629 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6630 target = expand_builtin_atomic_exchange (mode, exp, target);
6631 if (target)
6632 return target;
6633 break;
6634
6635 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6636 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6637 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6638 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6639 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6640 {
6641 unsigned int nargs, z;
6642 vec<tree, va_gc> *vec;
6643
6644 	mode
6645 	  = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6646 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6647 if (target)
6648 return target;
6649
6650 /* If this is turned into an external library call, the weak parameter
6651 must be dropped to match the expected parameter list. */
6652 nargs = call_expr_nargs (exp);
6653 vec_alloc (vec, nargs - 1);
6654 for (z = 0; z < 3; z++)
6655 vec->quick_push (CALL_EXPR_ARG (exp, z));
6656 /* Skip the boolean weak parameter. */
6657 for (z = 4; z < 6; z++)
6658 vec->quick_push (CALL_EXPR_ARG (exp, z));
6659 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6660 break;
6661 }
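
      /* After the rewrite above, the call matches the external routine's
	 five-argument shape, e.g. (libatomic; illustrative):

	   bool __atomic_compare_exchange_4 (void *mem, void *expect,
					     unsigned int desired,
					     int success, int failure);  */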
6662
6663 case BUILT_IN_ATOMIC_LOAD_1:
6664 case BUILT_IN_ATOMIC_LOAD_2:
6665 case BUILT_IN_ATOMIC_LOAD_4:
6666 case BUILT_IN_ATOMIC_LOAD_8:
6667 case BUILT_IN_ATOMIC_LOAD_16:
6668 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6669 target = expand_builtin_atomic_load (mode, exp, target);
6670 if (target)
6671 return target;
6672 break;
6673
6674 case BUILT_IN_ATOMIC_STORE_1:
6675 case BUILT_IN_ATOMIC_STORE_2:
6676 case BUILT_IN_ATOMIC_STORE_4:
6677 case BUILT_IN_ATOMIC_STORE_8:
6678 case BUILT_IN_ATOMIC_STORE_16:
6679 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6680 target = expand_builtin_atomic_store (mode, exp);
6681 if (target)
6682 return const0_rtx;
6683 break;
6684
6685 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6686 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6687 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6688 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6689 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6690 {
6691 enum built_in_function lib;
6692 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6693 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6694 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6695 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6696 ignore, lib);
6697 if (target)
6698 return target;
6699 break;
6700 }
6701 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6702 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6703 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6704 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6705 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6706 {
6707 enum built_in_function lib;
6708 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6709 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6710 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6711 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6712 ignore, lib);
6713 if (target)
6714 return target;
6715 break;
6716 }
6717 case BUILT_IN_ATOMIC_AND_FETCH_1:
6718 case BUILT_IN_ATOMIC_AND_FETCH_2:
6719 case BUILT_IN_ATOMIC_AND_FETCH_4:
6720 case BUILT_IN_ATOMIC_AND_FETCH_8:
6721 case BUILT_IN_ATOMIC_AND_FETCH_16:
6722 {
6723 enum built_in_function lib;
6724 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6725 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6726 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6727 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6728 ignore, lib);
6729 if (target)
6730 return target;
6731 break;
6732 }
6733 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6734 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6735 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6736 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6737 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6738 {
6739 enum built_in_function lib;
6740 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6741 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6742 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6743 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6744 ignore, lib);
6745 if (target)
6746 return target;
6747 break;
6748 }
6749 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6750 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6751 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6752 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6753 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6754 {
6755 enum built_in_function lib;
6756 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6757 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6758 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6759 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6760 ignore, lib);
6761 if (target)
6762 return target;
6763 break;
6764 }
6765 case BUILT_IN_ATOMIC_OR_FETCH_1:
6766 case BUILT_IN_ATOMIC_OR_FETCH_2:
6767 case BUILT_IN_ATOMIC_OR_FETCH_4:
6768 case BUILT_IN_ATOMIC_OR_FETCH_8:
6769 case BUILT_IN_ATOMIC_OR_FETCH_16:
6770 {
6771 enum built_in_function lib;
6772 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6773 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6774 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6775 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6776 ignore, lib);
6777 if (target)
6778 return target;
6779 break;
6780 }
6781 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6782 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6783 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6784 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6785 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6786 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6787 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6788 ignore, BUILT_IN_NONE);
6789 if (target)
6790 return target;
6791 break;
6792
6793 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6794 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6795 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6796 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6797 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6798 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6799 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6800 ignore, BUILT_IN_NONE);
6801 if (target)
6802 return target;
6803 break;
6804
6805 case BUILT_IN_ATOMIC_FETCH_AND_1:
6806 case BUILT_IN_ATOMIC_FETCH_AND_2:
6807 case BUILT_IN_ATOMIC_FETCH_AND_4:
6808 case BUILT_IN_ATOMIC_FETCH_AND_8:
6809 case BUILT_IN_ATOMIC_FETCH_AND_16:
6810 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6811 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6812 ignore, BUILT_IN_NONE);
6813 if (target)
6814 return target;
6815 break;
6816
6817 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6818 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6819 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6820 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6821 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6822 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6823 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6824 ignore, BUILT_IN_NONE);
6825 if (target)
6826 return target;
6827 break;
6828
6829 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6830 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6831 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6832 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6833 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6834 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6835 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6836 ignore, BUILT_IN_NONE);
6837 if (target)
6838 return target;
6839 break;
6840
6841 case BUILT_IN_ATOMIC_FETCH_OR_1:
6842 case BUILT_IN_ATOMIC_FETCH_OR_2:
6843 case BUILT_IN_ATOMIC_FETCH_OR_4:
6844 case BUILT_IN_ATOMIC_FETCH_OR_8:
6845 case BUILT_IN_ATOMIC_FETCH_OR_16:
6846 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6847 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6848 ignore, BUILT_IN_NONE);
6849 if (target)
6850 return target;
6851 break;
6852
6853 case BUILT_IN_ATOMIC_TEST_AND_SET:
6854 return expand_builtin_atomic_test_and_set (exp, target);
6855
6856 case BUILT_IN_ATOMIC_CLEAR:
6857 return expand_builtin_atomic_clear (exp);
6858
6859 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6860 return expand_builtin_atomic_always_lock_free (exp);
6861
6862 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6863 target = expand_builtin_atomic_is_lock_free (exp);
6864 if (target)
6865 return target;
6866 break;
6867
6868 case BUILT_IN_ATOMIC_THREAD_FENCE:
6869 expand_builtin_atomic_thread_fence (exp);
6870 return const0_rtx;
6871
6872 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6873 expand_builtin_atomic_signal_fence (exp);
6874 return const0_rtx;
6875
6876 case BUILT_IN_OBJECT_SIZE:
6877 return expand_builtin_object_size (exp);
6878
6879 case BUILT_IN_MEMCPY_CHK:
6880 case BUILT_IN_MEMPCPY_CHK:
6881 case BUILT_IN_MEMMOVE_CHK:
6882 case BUILT_IN_MEMSET_CHK:
6883 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6884 if (target)
6885 return target;
6886 break;
6887
6888 case BUILT_IN_STRCPY_CHK:
6889 case BUILT_IN_STPCPY_CHK:
6890 case BUILT_IN_STRNCPY_CHK:
6891 case BUILT_IN_STPNCPY_CHK:
6892 case BUILT_IN_STRCAT_CHK:
6893 case BUILT_IN_STRNCAT_CHK:
6894 case BUILT_IN_SNPRINTF_CHK:
6895 case BUILT_IN_VSNPRINTF_CHK:
6896 maybe_emit_chk_warning (exp, fcode);
6897 break;
6898
6899 case BUILT_IN_SPRINTF_CHK:
6900 case BUILT_IN_VSPRINTF_CHK:
6901 maybe_emit_sprintf_chk_warning (exp, fcode);
6902 break;
6903
6904 case BUILT_IN_FREE:
6905 if (warn_free_nonheap_object)
6906 maybe_emit_free_warning (exp);
6907 break;
6908
6909 case BUILT_IN_THREAD_POINTER:
6910 return expand_builtin_thread_pointer (exp, target);
6911
6912 case BUILT_IN_SET_THREAD_POINTER:
6913 expand_builtin_set_thread_pointer (exp);
6914 return const0_rtx;
6915
6916 case BUILT_IN_CILK_DETACH:
6917 expand_builtin_cilk_detach (exp);
6918 return const0_rtx;
6919
6920 case BUILT_IN_CILK_POP_FRAME:
6921 expand_builtin_cilk_pop_frame (exp);
6922 return const0_rtx;
6923
6924 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
6925 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
6926 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
6927 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
6928 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
6929 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
6930 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
6931 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
6932 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
6933 case BUILT_IN_CHKP_GET_PTR_LBOUND:
6934 case BUILT_IN_CHKP_GET_PTR_UBOUND:
6935 /* We allow user CHKP builtins if Pointer Bounds
6936 Checker is off. */
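/* The pointer-returning ones (INIT/NULL/COPY/SET/NARROW) then simply
   return their first argument, GET_PTR_LBOUND folds to 0,
   GET_PTR_UBOUND to size_int (-1), and the remaining checks and
   stores expand to nothing. */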
6937 if (!chkp_function_instrumented_p (current_function_decl))
6938 {
6939 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
6940 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
6941 || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
6942 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
6943 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
6944 return expand_normal (CALL_EXPR_ARG (exp, 0));
6945 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
6946 return expand_normal (size_zero_node);
6947 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
6948 return expand_normal (size_int (-1));
6949 else
6950 return const0_rtx;
6951 }
6952 /* FALLTHROUGH */
6953
6954 case BUILT_IN_CHKP_BNDMK:
6955 case BUILT_IN_CHKP_BNDSTX:
6956 case BUILT_IN_CHKP_BNDCL:
6957 case BUILT_IN_CHKP_BNDCU:
6958 case BUILT_IN_CHKP_BNDLDX:
6959 case BUILT_IN_CHKP_BNDRET:
6960 case BUILT_IN_CHKP_INTERSECT:
6961 case BUILT_IN_CHKP_NARROW:
6962 case BUILT_IN_CHKP_EXTRACT_LOWER:
6963 case BUILT_IN_CHKP_EXTRACT_UPPER:
6964 /* Software implementation of Pointer Bounds Checker is NYI.
6965 Target support is required. */
6966 error ("Your target platform does not support -fcheck-pointer-bounds");
6967 break;
6968
6969 case BUILT_IN_ACC_ON_DEVICE:
6970 /* Do the library call if we failed to expand the builtin when
6971 folding. */
6972 break;
6973
6974 default: /* Just do a library call for any unknown builtin. */
6975 break;
6976 }
6977
6978 /* The switch statement above can drop through to cause the function
6979 to be called normally. */
6980 return expand_call (exp, target, ignore);
6981 }
6982
6983 /* Similar to expand_builtin but is used for instrumented calls. */
6984
6985 rtx
6986 expand_builtin_with_bounds (tree exp, rtx target,
6987 rtx subtarget ATTRIBUTE_UNUSED,
6988 machine_mode mode, int ignore)
6989 {
6990 tree fndecl = get_callee_fndecl (exp);
6991 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6992
6993 gcc_assert (CALL_WITH_BOUNDS_P (exp));
6994
6995 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6996 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6997
6998 gcc_assert (fcode > BEGIN_CHKP_BUILTINS
6999 && fcode < END_CHKP_BUILTINS);
7000
7001 switch (fcode)
7002 {
7003 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
7004 target = expand_builtin_memcpy_with_bounds (exp, target);
7005 if (target)
7006 return target;
7007 break;
7008
7009 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
7010 target = expand_builtin_mempcpy_with_bounds (exp, target, mode);
7011 if (target)
7012 return target;
7013 break;
7014
7015 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
7016 target = expand_builtin_memset_with_bounds (exp, target, mode);
7017 if (target)
7018 return target;
7019 break;
7020
7021 default:
7022 break;
7023 }
7024
7025 /* The switch statement above can drop through to cause the function
7026 to be called normally. */
7027 return expand_call (exp, target, ignore);
7028 }
7029
7030 /* Determine whether a tree node represents a call to a built-in
7031 function. If the tree T is a call to a built-in function with
7032 the right number of arguments of the appropriate types, return
7033 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7034 Otherwise the return value is END_BUILTINS. */
7035
7036 enum built_in_function
7037 builtin_mathfn_code (const_tree t)
7038 {
7039 const_tree fndecl, arg, parmlist;
7040 const_tree argtype, parmtype;
7041 const_call_expr_arg_iterator iter;
7042
7043 if (TREE_CODE (t) != CALL_EXPR
7044 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7045 return END_BUILTINS;
7046
7047 fndecl = get_callee_fndecl (t);
7048 if (fndecl == NULL_TREE
7049 || TREE_CODE (fndecl) != FUNCTION_DECL
7050 || ! DECL_BUILT_IN (fndecl)
7051 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7052 return END_BUILTINS;
7053
7054 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7055 init_const_call_expr_arg_iterator (t, &iter);
7056 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7057 {
7058 /* If a function doesn't take a variable number of arguments,
7059 the last element in the list will have type `void'. */
7060 parmtype = TREE_VALUE (parmlist);
7061 if (VOID_TYPE_P (parmtype))
7062 {
7063 if (more_const_call_expr_args_p (&iter))
7064 return END_BUILTINS;
7065 return DECL_FUNCTION_CODE (fndecl);
7066 }
7067
7068 if (! more_const_call_expr_args_p (&iter))
7069 return END_BUILTINS;
7070
7071 arg = next_const_call_expr_arg (&iter);
7072 argtype = TREE_TYPE (arg);
7073
7074 if (SCALAR_FLOAT_TYPE_P (parmtype))
7075 {
7076 if (! SCALAR_FLOAT_TYPE_P (argtype))
7077 return END_BUILTINS;
7078 }
7079 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7080 {
7081 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7082 return END_BUILTINS;
7083 }
7084 else if (POINTER_TYPE_P (parmtype))
7085 {
7086 if (! POINTER_TYPE_P (argtype))
7087 return END_BUILTINS;
7088 }
7089 else if (INTEGRAL_TYPE_P (parmtype))
7090 {
7091 if (! INTEGRAL_TYPE_P (argtype))
7092 return END_BUILTINS;
7093 }
7094 else
7095 return END_BUILTINS;
7096 }
7097
7098 /* Variable-length argument list. */
7099 return DECL_FUNCTION_CODE (fndecl);
7100 }
7101
7102 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7103 evaluate to a constant. */
7104
7105 static tree
7106 fold_builtin_constant_p (tree arg)
7107 {
7108 /* We return 1 for a numeric type that's known to be a constant
7109 value at compile-time or for an aggregate type that's a
7110 literal constant. */
7111 STRIP_NOPS (arg);
7112
7113 /* If we know this is a constant, return the constant one. */
7114 if (CONSTANT_CLASS_P (arg)
7115 || (TREE_CODE (arg) == CONSTRUCTOR
7116 && TREE_CONSTANT (arg)))
7117 return integer_one_node;
7118 if (TREE_CODE (arg) == ADDR_EXPR)
7119 {
7120 tree op = TREE_OPERAND (arg, 0);
7121 if (TREE_CODE (op) == STRING_CST
7122 || (TREE_CODE (op) == ARRAY_REF
7123 && integer_zerop (TREE_OPERAND (op, 1))
7124 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7125 return integer_one_node;
7126 }
7127
7128 /* If this expression has side effects, show we don't know it to be a
7129 constant. Likewise if it's a pointer or aggregate type, since in
7130 those cases we only want literals, as those are only optimized
7131 when generating RTL, not later.
7132 And finally, if we are compiling an initializer, not code, we
7133 need to return a definite result now; there's not going to be any
7134 more optimization done. */
7135 if (TREE_SIDE_EFFECTS (arg)
7136 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7137 || POINTER_TYPE_P (TREE_TYPE (arg))
7138 || cfun == 0
7139 || folding_initializer
7140 || force_folding_builtin_constant_p)
7141 return integer_zero_node;
7142
7143 return NULL_TREE;
7144 }
7145
7146 /* Create a call to builtin_expect with PRED, EXPECTED and (optionally)
7147 PREDICTOR as its arguments and return it as a truthvalue. */
7148
7149 static tree
7150 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7151 tree predictor)
7152 {
7153 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7154
7155 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7156 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7157 ret_type = TREE_TYPE (TREE_TYPE (fn));
7158 pred_type = TREE_VALUE (arg_types);
7159 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7160
7161 pred = fold_convert_loc (loc, pred_type, pred);
7162 expected = fold_convert_loc (loc, expected_type, expected);
7163 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7164 predictor);
7165
7166 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7167 build_int_cst (ret_type, 0));
7168 }
7169
7170 /* Fold a call to builtin_expect with arguments ARG0, ARG1 and ARG2.
7171 Return NULL_TREE if no simplification is possible. */
7172
7173 tree
7174 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
7175 {
7176 tree inner, fndecl, inner_arg0;
7177 enum tree_code code;
7178
7179 /* Distribute the expected value over short-circuiting operators.
7180 See through the cast from truthvalue_type_node to long. */
7181 inner_arg0 = arg0;
7182 while (CONVERT_EXPR_P (inner_arg0)
7183 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7184 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7185 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7186
7187 /* If this is a builtin_expect within a builtin_expect, keep the
7188 inner one. See through a comparison against a constant. It
7189 might have been added to create a truthvalue. */
7190 inner = inner_arg0;
7191
7192 if (COMPARISON_CLASS_P (inner)
7193 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7194 inner = TREE_OPERAND (inner, 0);
7195
7196 if (TREE_CODE (inner) == CALL_EXPR
7197 && (fndecl = get_callee_fndecl (inner))
7198 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7199 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7200 return arg0;
7201
7202 inner = inner_arg0;
7203 code = TREE_CODE (inner);
7204 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7205 {
7206 tree op0 = TREE_OPERAND (inner, 0);
7207 tree op1 = TREE_OPERAND (inner, 1);
7208
7209 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7210 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
7211 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7212
7213 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7214 }
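/* E.g. __builtin_expect (a && b, 1) has, in effect, become
   (__builtin_expect (a, 1) && __builtin_expect (b, 1)),
   converted back to the type of the original argument. */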
7215
7216 /* If the argument isn't invariant then there's nothing else we can do. */
7217 if (!TREE_CONSTANT (inner_arg0))
7218 return NULL_TREE;
7219
7220 /* If we expect that a comparison against the argument will fold to
7221 a constant return the constant. In practice, this means a true
7222 constant or the address of a non-weak symbol. */
7223 inner = inner_arg0;
7224 STRIP_NOPS (inner);
7225 if (TREE_CODE (inner) == ADDR_EXPR)
7226 {
7227 do
7228 {
7229 inner = TREE_OPERAND (inner, 0);
7230 }
7231 while (TREE_CODE (inner) == COMPONENT_REF
7232 || TREE_CODE (inner) == ARRAY_REF);
7233 if ((TREE_CODE (inner) == VAR_DECL
7234 || TREE_CODE (inner) == FUNCTION_DECL)
7235 && DECL_WEAK (inner))
7236 return NULL_TREE;
7237 }
7238
7239 /* Otherwise, ARG0 already has the proper type for the return value. */
7240 return arg0;
7241 }
7242
7243 /* Fold a call to __builtin_classify_type with argument ARG. */
7244
7245 static tree
7246 fold_builtin_classify_type (tree arg)
7247 {
7248 if (arg == 0)
7249 return build_int_cst (integer_type_node, no_type_class);
7250
7251 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7252 }
7253
7254 /* Fold a call to __builtin_strlen with argument ARG. */
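/* E.g. strlen ("hello") folds to 5, converted to TYPE, because
   c_strlen can measure the string literal. */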
7255
7256 static tree
7257 fold_builtin_strlen (location_t loc, tree type, tree arg)
7258 {
7259 if (!validate_arg (arg, POINTER_TYPE))
7260 return NULL_TREE;
7261 else
7262 {
7263 tree len = c_strlen (arg, 0);
7264
7265 if (len)
7266 return fold_convert_loc (loc, type, len);
7267
7268 return NULL_TREE;
7269 }
7270 }
7271
7272 /* If ARG is a foldable constant real, use FN to round it to an integer
7273 value and try to represent the result in integer type ITYPE. Return
7274 the value on success, otherwise return null. */
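/* E.g. with FN = real_round and ITYPE = long, lround (2.5) folds to
   3; NULL_TREE is returned if the rounded value does not fit. */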
7275
7276 static tree
7277 do_real_to_int_conversion (tree itype, tree arg,
7278 void (*fn) (REAL_VALUE_TYPE *, machine_mode,
7279 const REAL_VALUE_TYPE *))
7280 {
7281 if (TREE_CODE (arg) != REAL_CST || TREE_OVERFLOW (arg))
7282 return NULL_TREE;
7283
7284 const REAL_VALUE_TYPE *value = TREE_REAL_CST_PTR (arg);
7285 if (!real_isfinite (value))
7286 return NULL_TREE;
7287
7288 tree ftype = TREE_TYPE (arg);
7289 REAL_VALUE_TYPE rounded;
7290 fn (&rounded, TYPE_MODE (ftype), value);
7291
7292 bool fail = false;
7293 wide_int ival = real_to_integer (&rounded, &fail, TYPE_PRECISION (itype));
7294 if (fail)
7295 return NULL_TREE;
7296
7297 return wide_int_to_tree (itype, ival);
7298 }
7299
7300
7301 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7302
7303 static tree
7304 fold_builtin_inf (location_t loc, tree type, int warn)
7305 {
7306 REAL_VALUE_TYPE real;
7307
7308 /* __builtin_inff is intended to be usable to define INFINITY on all
7309 targets. If an infinity is not available, INFINITY expands "to a
7310 positive constant of type float that overflows at translation
7311 time", footnote "In this case, using INFINITY will violate the
7312 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7313 Thus we pedwarn to ensure this constraint violation is
7314 diagnosed. */
7315 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7316 pedwarn (loc, 0, "target format does not support infinity");
7317
7318 real_inf (&real);
7319 return build_real (type, real);
7320 }
7321
7322 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7323
7324 static tree
7325 fold_builtin_nan (tree arg, tree type, int quiet)
7326 {
7327 REAL_VALUE_TYPE real;
7328 const char *str;
7329
7330 if (!validate_arg (arg, POINTER_TYPE))
7331 return NULL_TREE;
7332 str = c_getstr (arg);
7333 if (!str)
7334 return NULL_TREE;
7335
7336 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7337 return NULL_TREE;
7338
7339 return build_real (type, real);
7340 }
7341
7342 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7343 NULL_TREE if no simplification can be made. */
7344
7345 static tree
7346 fold_builtin_sincos (location_t loc,
7347 tree arg0, tree arg1, tree arg2)
7348 {
7349 tree type;
7350 tree res, fn, call;
7351
7352 if (!validate_arg (arg0, REAL_TYPE)
7353 || !validate_arg (arg1, POINTER_TYPE)
7354 || !validate_arg (arg2, POINTER_TYPE))
7355 return NULL_TREE;
7356
7357 type = TREE_TYPE (arg0);
7358
7359 /* Calculate the result when the argument is a constant. */
7360 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7361 return res;
7362
7363 /* Canonicalize sincos to cexpi. */
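/* I.e. sincos (x, &s, &c) becomes, in effect,
   t = cexpi (x); s = __imag t; c = __real t;
   which exposes the shared computation to CSE. */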
7364 if (!targetm.libc_has_function (function_c99_math_complex))
7365 return NULL_TREE;
7366 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7367 if (!fn)
7368 return NULL_TREE;
7369
7370 call = build_call_expr_loc (loc, fn, 1, arg0);
7371 call = builtin_save_expr (call);
7372
7373 return build2 (COMPOUND_EXPR, void_type_node,
7374 build2 (MODIFY_EXPR, void_type_node,
7375 build_fold_indirect_ref_loc (loc, arg1),
7376 build1 (IMAGPART_EXPR, type, call)),
7377 build2 (MODIFY_EXPR, void_type_node,
7378 build_fold_indirect_ref_loc (loc, arg2),
7379 build1 (REALPART_EXPR, type, call)));
7380 }
7381
7382 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
7383 and their long and long long variants (i.e. ffsl and ffsll). ARG is
7384 the argument to the call. Return NULL_TREE if no simplification can
7385 be made. */
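/* E.g. __builtin_popcount (0xf0) folds to 4, __builtin_ffs (8)
   to 4, and __builtin_clz (1) to TYPE_PRECISION (type) - 1. */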
7386
7387 static tree
7388 fold_builtin_bitop (tree fndecl, tree arg)
7389 {
7390 if (!validate_arg (arg, INTEGER_TYPE))
7391 return NULL_TREE;
7392
7393 /* Optimize for constant argument. */
7394 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7395 {
7396 tree type = TREE_TYPE (arg);
7397 int result;
7398
7399 switch (DECL_FUNCTION_CODE (fndecl))
7400 {
7401 CASE_INT_FN (BUILT_IN_FFS):
7402 result = wi::ffs (arg);
7403 break;
7404
7405 CASE_INT_FN (BUILT_IN_CLZ):
7406 if (wi::ne_p (arg, 0))
7407 result = wi::clz (arg);
7408 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7409 result = TYPE_PRECISION (type);
7410 break;
7411
7412 CASE_INT_FN (BUILT_IN_CTZ):
7413 if (wi::ne_p (arg, 0))
7414 result = wi::ctz (arg);
7415 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7416 result = TYPE_PRECISION (type);
7417 break;
7418
7419 CASE_INT_FN (BUILT_IN_CLRSB):
7420 result = wi::clrsb (arg);
7421 break;
7422
7423 CASE_INT_FN (BUILT_IN_POPCOUNT):
7424 result = wi::popcount (arg);
7425 break;
7426
7427 CASE_INT_FN (BUILT_IN_PARITY):
7428 result = wi::parity (arg);
7429 break;
7430
7431 default:
7432 gcc_unreachable ();
7433 }
7434
7435 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
7436 }
7437
7438 return NULL_TREE;
7439 }
7440
7441 /* Fold function call to builtin_bswap and the short, long and long long
7442 variants. Return NULL_TREE if no simplification can be made. */
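/* E.g. __builtin_bswap32 (0x12345678) folds to 0x78563412. */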
7443 static tree
7444 fold_builtin_bswap (tree fndecl, tree arg)
7445 {
7446 if (! validate_arg (arg, INTEGER_TYPE))
7447 return NULL_TREE;
7448
7449 /* Optimize constant value. */
7450 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7451 {
7452 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7453
7454 switch (DECL_FUNCTION_CODE (fndecl))
7455 {
7456 case BUILT_IN_BSWAP16:
7457 case BUILT_IN_BSWAP32:
7458 case BUILT_IN_BSWAP64:
7459 {
7460 signop sgn = TYPE_SIGN (type);
7461 tree result =
7462 wide_int_to_tree (type,
7463 wide_int::from (arg, TYPE_PRECISION (type),
7464 sgn).bswap ());
7465 return result;
7466 }
7467 default:
7468 gcc_unreachable ();
7469 }
7470 }
7471
7472 return NULL_TREE;
7473 }
7474
7475 /* Fold a builtin function call to pow, powf, or powl. Return
7476 NULL_TREE if no simplification can be made. */
7477 static tree
7478 fold_const_builtin_pow (tree arg0, tree arg1, tree type)
7479 {
7480 tree res;
7481
7482 if (!validate_arg (arg0, REAL_TYPE)
7483 || !validate_arg (arg1, REAL_TYPE))
7484 return NULL_TREE;
7485
7486 /* Calculate the result when the argument is a constant. */
7487 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
7488 return res;
7489
7490 /* Check for an integer exponent. */
7491 if (TREE_CODE (arg0) == REAL_CST
7492 && !TREE_OVERFLOW (arg0)
7493 && TREE_CODE (arg1) == REAL_CST
7494 && !TREE_OVERFLOW (arg1))
7495 {
7496 REAL_VALUE_TYPE cint1;
7497 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (arg0);
7498 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (arg1);
7499 HOST_WIDE_INT n1 = real_to_integer (c1);
7500 real_from_integer (&cint1, VOIDmode, n1, SIGNED);
7501 /* Attempt to evaluate pow at compile-time, unless this should
7502 raise an exception. */
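/* E.g. pow (2.0, 3.0) folds to 8.0 here, while pow (0.0, -1.0)
   is left alone under -ftrapping-math or -fmath-errno since it
   must raise a division-by-zero exception or set errno at run
   time. */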
7503 if (real_identical (c1, &cint1)
7504 && (n1 > 0
7505 || (!flag_trapping_math && !flag_errno_math)
7506 || !real_equal (c0, &dconst0)))
7507 {
7508 REAL_VALUE_TYPE x;
7509 bool inexact = real_powi (&x, TYPE_MODE (type), c0, n1);
7510 if (flag_unsafe_math_optimizations || !inexact)
7511 return build_real (type, x);
7512 }
7513 }
7514
7515 return NULL_TREE;
7516 }
7517
7518 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
7519 arguments to the call, and TYPE is its return type.
7520 Return NULL_TREE if no simplification can be made. */
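/* E.g. memchr ("hello", 'l', 5) folds to the address "hello" + 2,
   and to a null pointer if the character does not occur within the
   first LEN bytes. */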
7521
7522 static tree
7523 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
7524 {
7525 if (!validate_arg (arg1, POINTER_TYPE)
7526 || !validate_arg (arg2, INTEGER_TYPE)
7527 || !validate_arg (len, INTEGER_TYPE))
7528 return NULL_TREE;
7529 else
7530 {
7531 const char *p1;
7532
7533 if (TREE_CODE (arg2) != INTEGER_CST
7534 || !tree_fits_uhwi_p (len))
7535 return NULL_TREE;
7536
7537 p1 = c_getstr (arg1);
7538 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
7539 {
7540 char c;
7541 const char *r;
7542 tree tem;
7543
7544 if (target_char_cast (arg2, &c))
7545 return NULL_TREE;
7546
7547 r = (const char *) memchr (p1, c, tree_to_uhwi (len));
7548
7549 if (r == NULL)
7550 return build_int_cst (TREE_TYPE (arg1), 0);
7551
7552 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
7553 return fold_convert_loc (loc, type, tem);
7554 }
7555 return NULL_TREE;
7556 }
7557 }
7558
7559 /* Fold function call to builtin memcmp with arguments ARG1, ARG2 and LEN.
7560 Return NULL_TREE if no simplification can be made. */
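/* E.g. memcmp (p, p, n) folds to 0, memcmp ("abc", "abd", 3) to -1,
   and a LEN of one becomes a subtraction of the first bytes. */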
7561
7562 static tree
7563 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
7564 {
7565 const char *p1, *p2;
7566
7567 if (!validate_arg (arg1, POINTER_TYPE)
7568 || !validate_arg (arg2, POINTER_TYPE)
7569 || !validate_arg (len, INTEGER_TYPE))
7570 return NULL_TREE;
7571
7572 /* If the LEN parameter is zero, return zero. */
7573 if (integer_zerop (len))
7574 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
7575 arg1, arg2);
7576
7577 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7578 if (operand_equal_p (arg1, arg2, 0))
7579 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
7580
7581 p1 = c_getstr (arg1);
7582 p2 = c_getstr (arg2);
7583
7584 /* If all arguments are constant, and the value of len is not greater
7585 than the lengths of arg1 and arg2, evaluate at compile-time. */
7586 if (tree_fits_uhwi_p (len) && p1 && p2
7587 && compare_tree_int (len, strlen (p1) + 1) <= 0
7588 && compare_tree_int (len, strlen (p2) + 1) <= 0)
7589 {
7590 const int r = memcmp (p1, p2, tree_to_uhwi (len));
7591
7592 if (r > 0)
7593 return integer_one_node;
7594 else if (r < 0)
7595 return integer_minus_one_node;
7596 else
7597 return integer_zero_node;
7598 }
7599
7600 /* If the LEN parameter is one, return an expression corresponding to
7601 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
7602 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
7603 {
7604 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7605 tree cst_uchar_ptr_node
7606 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7607
7608 tree ind1
7609 = fold_convert_loc (loc, integer_type_node,
7610 build1 (INDIRECT_REF, cst_uchar_node,
7611 fold_convert_loc (loc,
7612 cst_uchar_ptr_node,
7613 arg1)));
7614 tree ind2
7615 = fold_convert_loc (loc, integer_type_node,
7616 build1 (INDIRECT_REF, cst_uchar_node,
7617 fold_convert_loc (loc,
7618 cst_uchar_ptr_node,
7619 arg2)));
7620 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
7621 }
7622
7623 return NULL_TREE;
7624 }
7625
7626 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
7627 Return NULL_TREE if no simplification can be made. */
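/* E.g. strcmp ("abc", "abd") folds to -1 and strcmp (s, "") to
   *(const unsigned char *) s. */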
7628
7629 static tree
7630 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
7631 {
7632 const char *p1, *p2;
7633
7634 if (!validate_arg (arg1, POINTER_TYPE)
7635 || !validate_arg (arg2, POINTER_TYPE))
7636 return NULL_TREE;
7637
7638 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7639 if (operand_equal_p (arg1, arg2, 0))
7640 return integer_zero_node;
7641
7642 p1 = c_getstr (arg1);
7643 p2 = c_getstr (arg2);
7644
7645 if (p1 && p2)
7646 {
7647 const int i = strcmp (p1, p2);
7648 if (i < 0)
7649 return integer_minus_one_node;
7650 else if (i > 0)
7651 return integer_one_node;
7652 else
7653 return integer_zero_node;
7654 }
7655
7656 /* If the second arg is "", return *(const unsigned char*)arg1. */
7657 if (p2 && *p2 == '\0')
7658 {
7659 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7660 tree cst_uchar_ptr_node
7661 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7662
7663 return fold_convert_loc (loc, integer_type_node,
7664 build1 (INDIRECT_REF, cst_uchar_node,
7665 fold_convert_loc (loc,
7666 cst_uchar_ptr_node,
7667 arg1)));
7668 }
7669
7670 /* If the first arg is "", return -*(const unsigned char*)arg2. */
7671 if (p1 && *p1 == '\0')
7672 {
7673 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7674 tree cst_uchar_ptr_node
7675 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7676
7677 tree temp
7678 = fold_convert_loc (loc, integer_type_node,
7679 build1 (INDIRECT_REF, cst_uchar_node,
7680 fold_convert_loc (loc,
7681 cst_uchar_ptr_node,
7682 arg2)));
7683 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
7684 }
7685
7686 return NULL_TREE;
7687 }
7688
7689 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
7690 Return NULL_TREE if no simplification can be made. */
7691
7692 static tree
7693 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
7694 {
7695 const char *p1, *p2;
7696
7697 if (!validate_arg (arg1, POINTER_TYPE)
7698 || !validate_arg (arg2, POINTER_TYPE)
7699 || !validate_arg (len, INTEGER_TYPE))
7700 return NULL_TREE;
7701
7702 /* If the LEN parameter is zero, return zero. */
7703 if (integer_zerop (len))
7704 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
7705 arg1, arg2);
7706
7707 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7708 if (operand_equal_p (arg1, arg2, 0))
7709 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
7710
7711 p1 = c_getstr (arg1);
7712 p2 = c_getstr (arg2);
7713
7714 if (tree_fits_uhwi_p (len) && p1 && p2)
7715 {
7716 const int i = strncmp (p1, p2, tree_to_uhwi (len));
7717 if (i > 0)
7718 return integer_one_node;
7719 else if (i < 0)
7720 return integer_minus_one_node;
7721 else
7722 return integer_zero_node;
7723 }
7724
7725 /* If the second arg is "", and the length is greater than zero,
7726 return *(const unsigned char*)arg1. */
7727 if (p2 && *p2 == '\0'
7728 && TREE_CODE (len) == INTEGER_CST
7729 && tree_int_cst_sgn (len) == 1)
7730 {
7731 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7732 tree cst_uchar_ptr_node
7733 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7734
7735 return fold_convert_loc (loc, integer_type_node,
7736 build1 (INDIRECT_REF, cst_uchar_node,
7737 fold_convert_loc (loc,
7738 cst_uchar_ptr_node,
7739 arg1)));
7740 }
7741
7742 /* If the first arg is "", and the length is greater than zero,
7743 return -*(const unsigned char*)arg2. */
7744 if (p1 && *p1 == '\0'
7745 && TREE_CODE (len) == INTEGER_CST
7746 && tree_int_cst_sgn (len) == 1)
7747 {
7748 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7749 tree cst_uchar_ptr_node
7750 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7751
7752 tree temp = fold_convert_loc (loc, integer_type_node,
7753 build1 (INDIRECT_REF, cst_uchar_node,
7754 fold_convert_loc (loc,
7755 cst_uchar_ptr_node,
7756 arg2)));
7757 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
7758 }
7759
7760 /* If the LEN parameter is one, return an expression corresponding to
7761 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
7762 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
7763 {
7764 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7765 tree cst_uchar_ptr_node
7766 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7767
7768 tree ind1 = fold_convert_loc (loc, integer_type_node,
7769 build1 (INDIRECT_REF, cst_uchar_node,
7770 fold_convert_loc (loc,
7771 cst_uchar_ptr_node,
7772 arg1)));
7773 tree ind2 = fold_convert_loc (loc, integer_type_node,
7774 build1 (INDIRECT_REF, cst_uchar_node,
7775 fold_convert_loc (loc,
7776 cst_uchar_ptr_node,
7777 arg2)));
7778 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
7779 }
7780
7781 return NULL_TREE;
7782 }
7783
7784 /* Fold a call to builtin isascii with argument ARG. */
7785
7786 static tree
7787 fold_builtin_isascii (location_t loc, tree arg)
7788 {
7789 if (!validate_arg (arg, INTEGER_TYPE))
7790 return NULL_TREE;
7791 else
7792 {
7793 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
7794 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
7795 build_int_cst (integer_type_node,
7796 ~ (unsigned HOST_WIDE_INT) 0x7f));
7797 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
7798 arg, integer_zero_node);
7799 }
7800 }
7801
7802 /* Fold a call to builtin toascii with argument ARG. */
7803
7804 static tree
7805 fold_builtin_toascii (location_t loc, tree arg)
7806 {
7807 if (!validate_arg (arg, INTEGER_TYPE))
7808 return NULL_TREE;
7809
7810 /* Transform toascii(c) -> (c & 0x7f). */
7811 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
7812 build_int_cst (integer_type_node, 0x7f));
7813 }
7814
7815 /* Fold a call to builtin isdigit with argument ARG. */
7816
7817 static tree
7818 fold_builtin_isdigit (location_t loc, tree arg)
7819 {
7820 if (!validate_arg (arg, INTEGER_TYPE))
7821 return NULL_TREE;
7822 else
7823 {
7824 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
7825 /* According to the C standard, isdigit is unaffected by locale.
7826 However, it definitely is affected by the target character set. */
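/* On an ASCII target the result is (unsigned) c - 48 <= 9, a
   single unsigned comparison in place of a libc call. */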
7827 unsigned HOST_WIDE_INT target_digit0
7828 = lang_hooks.to_target_charset ('0');
7829
7830 if (target_digit0 == 0)
7831 return NULL_TREE;
7832
7833 arg = fold_convert_loc (loc, unsigned_type_node, arg);
7834 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
7835 build_int_cst (unsigned_type_node, target_digit0));
7836 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
7837 build_int_cst (unsigned_type_node, 9));
7838 }
7839 }
7840
7841 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
7842
7843 static tree
7844 fold_builtin_fabs (location_t loc, tree arg, tree type)
7845 {
7846 if (!validate_arg (arg, REAL_TYPE))
7847 return NULL_TREE;
7848
7849 arg = fold_convert_loc (loc, type, arg);
7850 return fold_build1_loc (loc, ABS_EXPR, type, arg);
7851 }
7852
7853 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
7854
7855 static tree
7856 fold_builtin_abs (location_t loc, tree arg, tree type)
7857 {
7858 if (!validate_arg (arg, INTEGER_TYPE))
7859 return NULL_TREE;
7860
7861 arg = fold_convert_loc (loc, type, arg);
7862 return fold_build1_loc (loc, ABS_EXPR, type, arg);
7863 }
7864
7865 /* Fold a fma operation with arguments ARG[012]. */
7866
7867 tree
7868 fold_fma (location_t loc ATTRIBUTE_UNUSED,
7869 tree type, tree arg0, tree arg1, tree arg2)
7870 {
7871 if (TREE_CODE (arg0) == REAL_CST
7872 && TREE_CODE (arg1) == REAL_CST
7873 && TREE_CODE (arg2) == REAL_CST)
7874 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
7875
7876 return NULL_TREE;
7877 }
7878
7879 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
7880
7881 static tree
7882 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
7883 {
7884 /* ??? Only expand to FMA_EXPR if it's directly supported. */
7885 if (validate_arg (arg0, REAL_TYPE)
7886 && validate_arg (arg1, REAL_TYPE)
7887 && validate_arg (arg2, REAL_TYPE)
7888 && optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
7889 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
7890
7891 return NULL_TREE;
7892 }
7893
7894 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
7895
7896 static tree
7897 fold_builtin_carg (location_t loc, tree arg, tree type)
7898 {
7899 if (validate_arg (arg, COMPLEX_TYPE)
7900 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7901 {
7902 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
7903
7904 if (atan2_fn)
7905 {
7906 tree new_arg = builtin_save_expr (arg);
7907 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
7908 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
7909 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
7910 }
7911 }
7912
7913 return NULL_TREE;
7914 }
7915
7916 /* Fold a call to builtin logb/ilogb. */
7917
7918 static tree
7919 fold_const_builtin_logb (location_t loc, tree arg, tree rettype)
7920 {
7921 if (! validate_arg (arg, REAL_TYPE))
7922 return NULL_TREE;
7923
7924 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
7925 {
7926 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
7927
7928 switch (value->cl)
7929 {
7930 case rvc_nan:
7931 case rvc_inf:
7932 /* If arg is Inf or NaN and we're logb, return it. */
7933 if (TREE_CODE (rettype) == REAL_TYPE)
7934 {
7935 /* For logb(-Inf) we have to return +Inf. */
7936 if (real_isinf (value) && real_isneg (value))
7937 {
7938 REAL_VALUE_TYPE tem;
7939 real_inf (&tem);
7940 return build_real (rettype, tem);
7941 }
7942 return fold_convert_loc (loc, rettype, arg);
7943 }
7944 /* Fall through... */
7945 case rvc_zero:
7946 /* Zero may set errno and/or raise an exception for logb; also,
7947 for ilogb we don't know FP_ILOGB0. */
7948 return NULL_TREE;
7949 case rvc_normal:
7950 /* For normal numbers, proceed iff radix == 2. In GCC,
7951 normalized significands are in the range [0.5, 1.0). We
7952 want the exponent as if they were [1.0, 2.0) so get the
7953 exponent and subtract 1. */
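/* E.g. 8.0 is represented as 0.5 * 2**4, so REAL_EXP is 4 and
   logb (8.0) folds to 3. */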
7954 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
7955 return fold_convert_loc (loc, rettype,
7956 build_int_cst (integer_type_node,
7957 REAL_EXP (value)-1));
7958 break;
7959 }
7960 }
7961
7962 return NULL_TREE;
7963 }
7964
7965 /* Fold a call to builtin significand, if radix == 2. */
7966
7967 static tree
7968 fold_const_builtin_significand (location_t loc, tree arg, tree rettype)
7969 {
7970 if (! validate_arg (arg, REAL_TYPE))
7971 return NULL_TREE;
7972
7973 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
7974 {
7975 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
7976
7977 switch (value->cl)
7978 {
7979 case rvc_zero:
7980 case rvc_nan:
7981 case rvc_inf:
7982 /* If arg is +-0, +-Inf or +-NaN, then return it. */
7983 return fold_convert_loc (loc, rettype, arg);
7984 case rvc_normal:
7985 /* For normal numbers, proceed iff radix == 2. */
7986 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
7987 {
7988 REAL_VALUE_TYPE result = *value;
7989 /* In GCC, normalized significands are in the range [0.5,
7990 1.0). We want them to be [1.0, 2.0) so set the
7991 exponent to 1. */
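/* E.g. significand (8.0) folds to 1.0, since 8.0 = 1.0 * 2**3. */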
7992 SET_REAL_EXP (&result, 1);
7993 return build_real (rettype, result);
7994 }
7995 break;
7996 }
7997 }
7998
7999 return NULL_TREE;
8000 }
8001
8002 /* Fold a call to builtin frexp; we can assume the base is 2. */
8003
8004 static tree
8005 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
8006 {
8007 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8008 return NULL_TREE;
8009
8010 STRIP_NOPS (arg0);
8011
8012 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8013 return NULL_TREE;
8014
8015 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8016
8017 /* Proceed if a valid pointer type was passed in. */
8018 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
8019 {
8020 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8021 tree frac, exp;
8022
8023 switch (value->cl)
8024 {
8025 case rvc_zero:
8026 /* For +-0, return (*exp = 0, +-0). */
8027 exp = integer_zero_node;
8028 frac = arg0;
8029 break;
8030 case rvc_nan:
8031 case rvc_inf:
8032 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
8033 return omit_one_operand_loc (loc, rettype, arg0, arg1);
8034 case rvc_normal:
8035 {
8036 /* Since the frexp function always expects base 2, and in
8037 GCC normalized significands are already in the range
8038 [0.5, 1.0), we have exactly what frexp wants. */
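/* E.g. frexp (8.0, &e) folds to 0.5 with *e set to 4. */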
8039 REAL_VALUE_TYPE frac_rvt = *value;
8040 SET_REAL_EXP (&frac_rvt, 0);
8041 frac = build_real (rettype, frac_rvt);
8042 exp = build_int_cst (integer_type_node, REAL_EXP (value));
8043 }
8044 break;
8045 default:
8046 gcc_unreachable ();
8047 }
8048
8049 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
8050 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
8051 TREE_SIDE_EFFECTS (arg1) = 1;
8052 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
8053 }
8054
8055 return NULL_TREE;
8056 }
8057
8058 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
8059 then we can assume the base is two. If it's false, then we have to
8060 check the mode of the TYPE parameter in certain cases. */
8061
8062 static tree
8063 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
8064 tree type, bool ldexp)
8065 {
8066 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
8067 {
8068 STRIP_NOPS (arg0);
8069 STRIP_NOPS (arg1);
8070
8071 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
8072 if (real_zerop (arg0) || integer_zerop (arg1)
8073 || (TREE_CODE (arg0) == REAL_CST
8074 && !real_isfinite (&TREE_REAL_CST (arg0))))
8075 return omit_one_operand_loc (loc, type, arg0, arg1);
8076
8077 /* If both arguments are constant, then try to evaluate it. */
8078 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
8079 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
8080 && tree_fits_shwi_p (arg1))
8081 {
8082 /* Bound the maximum adjustment to twice the range of the
8083 mode's valid exponents. Use abs to ensure the range is
8084 positive as a sanity check. */
8085 const long max_exp_adj = 2 *
8086 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
8087 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
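/* For IEEE double (emax = 1024, emin = -1021) this gives
   max_exp_adj = 4090. */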
8088
8089 /* Get the user-requested adjustment. */
8090 const HOST_WIDE_INT req_exp_adj = tree_to_shwi (arg1);
8091
8092 /* The requested adjustment must be inside this range. This
8093 is a preliminary cap to avoid things like overflow; we
8094 may still fail to compute the result for other reasons. */
8095 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
8096 {
8097 REAL_VALUE_TYPE initial_result;
8098
8099 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
8100
8101 /* Ensure we didn't overflow. */
8102 if (! real_isinf (&initial_result))
8103 {
8104 const REAL_VALUE_TYPE trunc_result
8105 = real_value_truncate (TYPE_MODE (type), initial_result);
8106
8107 /* Only proceed if the target mode can hold the
8108 resulting value. */
8109 if (real_equal (&initial_result, &trunc_result))
8110 return build_real (type, trunc_result);
8111 }
8112 }
8113 }
8114 }
8115
8116 return NULL_TREE;
8117 }
8118
8119 /* Fold a call to builtin modf. */
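/* For a constant argument this folds e.g. modf (2.5, &i) to
   (*i = 2.0, 0.5); note that modf (-2.0, &i) yields a fractional
   part of -0.0. */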
8120
8121 static tree
8122 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
8123 {
8124 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8125 return NULL_TREE;
8126
8127 STRIP_NOPS (arg0);
8128
8129 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8130 return NULL_TREE;
8131
8132 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8133
8134 /* Proceed if a valid pointer type was passed in. */
8135 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
8136 {
8137 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8138 REAL_VALUE_TYPE trunc, frac;
8139
8140 switch (value->cl)
8141 {
8142 case rvc_nan:
8143 case rvc_zero:
8144 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
8145 trunc = frac = *value;
8146 break;
8147 case rvc_inf:
8148 /* For +-Inf, return (*arg1 = arg0, +-0). */
8149 frac = dconst0;
8150 frac.sign = value->sign;
8151 trunc = *value;
8152 break;
8153 case rvc_normal:
8154 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
8155 real_trunc (&trunc, VOIDmode, value);
8156 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
8157 /* If the original number was negative and already
8158 integral, then the fractional part is -0.0. */
8159 if (value->sign && frac.cl == rvc_zero)
8160 frac.sign = value->sign;
8161 break;
8162 }
8163
8164 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
8165 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
8166 build_real (rettype, trunc));
8167 TREE_SIDE_EFFECTS (arg1) = 1;
8168 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
8169 build_real (rettype, frac));
8170 }
8171
8172 return NULL_TREE;
8173 }
8174
8175 /* Given a location LOC, an interclass builtin function decl FNDECL
8176 and its single argument ARG, return a folded expression computing
8177 the same, or NULL_TREE if we either couldn't or didn't want to fold
8178 (the latter happens if there's an RTL instruction available). */
8179
8180 static tree
8181 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
8182 {
8183 machine_mode mode;
8184
8185 if (!validate_arg (arg, REAL_TYPE))
8186 return NULL_TREE;
8187
8188 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
8189 return NULL_TREE;
8190
8191 mode = TYPE_MODE (TREE_TYPE (arg));
8192
8193 /* If there is no optab, try generic code. */
8194 switch (DECL_FUNCTION_CODE (fndecl))
8195 {
8196 tree result;
8197
8198 CASE_FLT_FN (BUILT_IN_ISINF):
8199 {
8200 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
8201 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8202 tree const type = TREE_TYPE (arg);
8203 REAL_VALUE_TYPE r;
8204 char buf[128];
8205
8206 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8207 real_from_string (&r, buf);
8208 result = build_call_expr (isgr_fn, 2,
8209 fold_build1_loc (loc, ABS_EXPR, type, arg),
8210 build_real (type, r));
8211 return result;
8212 }
8213 CASE_FLT_FN (BUILT_IN_FINITE):
8214 case BUILT_IN_ISFINITE:
8215 {
8216 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
8217 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8218 tree const type = TREE_TYPE (arg);
8219 REAL_VALUE_TYPE r;
8220 char buf[128];
8221
8222 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8223 real_from_string (&r, buf);
8224 result = build_call_expr (isle_fn, 2,
8225 fold_build1_loc (loc, ABS_EXPR, type, arg),
8226 build_real (type, r));
8227 /*result = fold_build2_loc (loc, UNGT_EXPR,
8228 TREE_TYPE (TREE_TYPE (fndecl)),
8229 fold_build1_loc (loc, ABS_EXPR, type, arg),
8230 build_real (type, r));
8231 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
8232 TREE_TYPE (TREE_TYPE (fndecl)),
8233 result);*/
8234 return result;
8235 }
8236 case BUILT_IN_ISNORMAL:
8237 {
8238 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
8239 islessequal(fabs(x),DBL_MAX). */
8240 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8241 tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
8242 tree const type = TREE_TYPE (arg);
8243 REAL_VALUE_TYPE rmax, rmin;
8244 char buf[128];
8245
8246 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8247 real_from_string (&rmax, buf);
8248 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
8249 real_from_string (&rmin, buf);
8250 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
8251 result = build_call_expr (isle_fn, 2, arg,
8252 build_real (type, rmax));
8253 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
8254 build_call_expr (isge_fn, 2, arg,
8255 build_real (type, rmin)));
8256 return result;
8257 }
8258 default:
8259 break;
8260 }
8261
8262 return NULL_TREE;
8263 }
8264
8265 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
8266 ARG is the argument for the call. */
8267
8268 static tree
8269 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
8270 {
8271 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8272 REAL_VALUE_TYPE r;
8273
8274 if (!validate_arg (arg, REAL_TYPE))
8275 return NULL_TREE;
8276
8277 switch (builtin_index)
8278 {
8279 case BUILT_IN_ISINF:
8280 if (!HONOR_INFINITIES (arg))
8281 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8282
8283 if (TREE_CODE (arg) == REAL_CST)
8284 {
8285 r = TREE_REAL_CST (arg);
8286 if (real_isinf (&r))
8287 return real_compare (GT_EXPR, &r, &dconst0)
8288 ? integer_one_node : integer_minus_one_node;
8289 else
8290 return integer_zero_node;
8291 }
8292
8293 return NULL_TREE;
8294
8295 case BUILT_IN_ISINF_SIGN:
8296 {
8297 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
8298 /* In a boolean context, GCC will fold the inner COND_EXPR to
8299 1. So e.g. "if (isinf_sign(x))" would be folded to just
8300 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
8301 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
8302 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
8303 tree tmp = NULL_TREE;
8304
8305 arg = builtin_save_expr (arg);
8306
8307 if (signbit_fn && isinf_fn)
8308 {
8309 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
8310 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
8311
8312 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8313 signbit_call, integer_zero_node);
8314 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8315 isinf_call, integer_zero_node);
8316
8317 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
8318 integer_minus_one_node, integer_one_node);
8319 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8320 isinf_call, tmp,
8321 integer_zero_node);
8322 }
8323
8324 return tmp;
8325 }
8326
8327 case BUILT_IN_ISFINITE:
8328 if (!HONOR_NANS (arg)
8329 && !HONOR_INFINITIES (arg))
8330 return omit_one_operand_loc (loc, type, integer_one_node, arg);
8331
8332 if (TREE_CODE (arg) == REAL_CST)
8333 {
8334 r = TREE_REAL_CST (arg);
8335 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
8336 }
8337
8338 return NULL_TREE;
8339
8340 case BUILT_IN_ISNAN:
8341 if (!HONOR_NANS (arg))
8342 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8343
8344 if (TREE_CODE (arg) == REAL_CST)
8345 {
8346 r = TREE_REAL_CST (arg);
8347 return real_isnan (&r) ? integer_one_node : integer_zero_node;
8348 }
8349
8350 arg = builtin_save_expr (arg);
8351 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
8352
8353 default:
8354 gcc_unreachable ();
8355 }
8356 }
8357
8358 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
8359 This builtin will generate code to return the appropriate floating
8360 point classification depending on the value of the floating point
8361 number passed in. The possible return values must be supplied as
8362 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
8363 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
8364 one floating point argument which is "type generic". */
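/* A typical use, matching the <math.h> macro, is
   __builtin_fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL,
   FP_SUBNORMAL, FP_ZERO, x). */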
8365
8366 static tree
8367 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
8368 {
8369 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
8370 arg, type, res, tmp;
8371 machine_mode mode;
8372 REAL_VALUE_TYPE r;
8373 char buf[128];
8374
8375 /* Verify the required arguments in the original call. */
8376 if (nargs != 6
8377 || !validate_arg (args[0], INTEGER_TYPE)
8378 || !validate_arg (args[1], INTEGER_TYPE)
8379 || !validate_arg (args[2], INTEGER_TYPE)
8380 || !validate_arg (args[3], INTEGER_TYPE)
8381 || !validate_arg (args[4], INTEGER_TYPE)
8382 || !validate_arg (args[5], REAL_TYPE))
8383 return NULL_TREE;
8384
8385 fp_nan = args[0];
8386 fp_infinite = args[1];
8387 fp_normal = args[2];
8388 fp_subnormal = args[3];
8389 fp_zero = args[4];
8390 arg = args[5];
8391 type = TREE_TYPE (arg);
8392 mode = TYPE_MODE (type);
8393 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
8394
8395 /* fpclassify(x) ->
8396 isnan(x) ? FP_NAN :
8397 (fabs(x) == Inf ? FP_INFINITE :
8398 (fabs(x) >= DBL_MIN ? FP_NORMAL :
8399 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
8400
8401 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8402 build_real (type, dconst0));
8403 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8404 tmp, fp_zero, fp_subnormal);
8405
8406 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
8407 real_from_string (&r, buf);
8408 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
8409 arg, build_real (type, r));
8410 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
8411
8412 if (HONOR_INFINITIES (mode))
8413 {
8414 real_inf (&r);
8415 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8416 build_real (type, r));
8417 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
8418 fp_infinite, res);
8419 }
8420
8421 if (HONOR_NANS (mode))
8422 {
8423 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
8424 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
8425 }
8426
8427 return res;
8428 }
8429
8430 /* Fold a call to an unordered comparison function such as
8431 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
8432 being called and ARG0 and ARG1 are the arguments for the call.
8433 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
8434 the opposite of the desired result. UNORDERED_CODE is used
8435 for modes that can hold NaNs and ORDERED_CODE is used for
8436 the rest. */
8437
8438 static tree
8439 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
8440 enum tree_code unordered_code,
8441 enum tree_code ordered_code)
8442 {
8443 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8444 enum tree_code code;
8445 tree type0, type1;
8446 enum tree_code code0, code1;
8447 tree cmp_type = NULL_TREE;
8448
8449 type0 = TREE_TYPE (arg0);
8450 type1 = TREE_TYPE (arg1);
8451
8452 code0 = TREE_CODE (type0);
8453 code1 = TREE_CODE (type1);
8454
8455 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
8456 /* Choose the wider of two real types. */
8457 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
8458 ? type0 : type1;
8459 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
8460 cmp_type = type0;
8461 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
8462 cmp_type = type1;
8463
8464 arg0 = fold_convert_loc (loc, cmp_type, arg0);
8465 arg1 = fold_convert_loc (loc, cmp_type, arg1);
8466
8467 if (unordered_code == UNORDERED_EXPR)
8468 {
8469 if (!HONOR_NANS (arg0))
8470 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
8471 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
8472 }
8473
8474 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
8475 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
8476 fold_build2_loc (loc, code, type, arg0, arg1));
8477 }
8478
8479 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
8480 arithmetic if it can never overflow, or into internal functions that
8481 return both the result of the arithmetic and an overflow boolean flag in
8482 a complex integer result, or some other check for overflow. */
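/* E.g. __builtin_add_overflow (a, b, &r) becomes, in effect,
   t = .ADD_OVERFLOW (a, b); *r = REALPART_EXPR <t>;
   with (bool) IMAGPART_EXPR <t> as the overflow result. */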
8483
8484 static tree
8485 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
8486 tree arg0, tree arg1, tree arg2)
8487 {
8488 enum internal_fn ifn = IFN_LAST;
8489 tree type = TREE_TYPE (TREE_TYPE (arg2));
8490 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
8491 switch (fcode)
8492 {
8493 case BUILT_IN_ADD_OVERFLOW:
8494 case BUILT_IN_SADD_OVERFLOW:
8495 case BUILT_IN_SADDL_OVERFLOW:
8496 case BUILT_IN_SADDLL_OVERFLOW:
8497 case BUILT_IN_UADD_OVERFLOW:
8498 case BUILT_IN_UADDL_OVERFLOW:
8499 case BUILT_IN_UADDLL_OVERFLOW:
8500 ifn = IFN_ADD_OVERFLOW;
8501 break;
8502 case BUILT_IN_SUB_OVERFLOW:
8503 case BUILT_IN_SSUB_OVERFLOW:
8504 case BUILT_IN_SSUBL_OVERFLOW:
8505 case BUILT_IN_SSUBLL_OVERFLOW:
8506 case BUILT_IN_USUB_OVERFLOW:
8507 case BUILT_IN_USUBL_OVERFLOW:
8508 case BUILT_IN_USUBLL_OVERFLOW:
8509 ifn = IFN_SUB_OVERFLOW;
8510 break;
8511 case BUILT_IN_MUL_OVERFLOW:
8512 case BUILT_IN_SMUL_OVERFLOW:
8513 case BUILT_IN_SMULL_OVERFLOW:
8514 case BUILT_IN_SMULLL_OVERFLOW:
8515 case BUILT_IN_UMUL_OVERFLOW:
8516 case BUILT_IN_UMULL_OVERFLOW:
8517 case BUILT_IN_UMULLL_OVERFLOW:
8518 ifn = IFN_MUL_OVERFLOW;
8519 break;
8520 default:
8521 gcc_unreachable ();
8522 }
8523 tree ctype = build_complex_type (type);
8524 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
8525 2, arg0, arg1);
8526 tree tgt = save_expr (call);
8527 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
8528 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
8529 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
8530 tree store
8531 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
8532 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
8533 }
8534
8535 /* Fold a call to built-in function FNDECL with 0 arguments.
8536 This function returns NULL_TREE if no simplification was possible. */
8537
8538 static tree
8539 fold_builtin_0 (location_t loc, tree fndecl)
8540 {
8541 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8542 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8543 switch (fcode)
8544 {
8545 CASE_FLT_FN (BUILT_IN_INF):
8546 case BUILT_IN_INFD32:
8547 case BUILT_IN_INFD64:
8548 case BUILT_IN_INFD128:
8549 return fold_builtin_inf (loc, type, true);
8550
8551 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
8552 return fold_builtin_inf (loc, type, false);
8553
8554 case BUILT_IN_CLASSIFY_TYPE:
8555 return fold_builtin_classify_type (NULL_TREE);
8556
8557 default:
8558 break;
8559 }
8560 return NULL_TREE;
8561 }
8562
8563 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
8564 This function returns NULL_TREE if no simplification was possible. */
8565
8566 static tree
8567 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
8568 {
8569 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8570 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8571 switch (fcode)
8572 {
8573 case BUILT_IN_CONSTANT_P:
8574 {
8575 tree val = fold_builtin_constant_p (arg0);
8576
8577 /* Gimplification will pull the CALL_EXPR for the builtin out of
8578 an if condition. When not optimizing, we'll not CSE it back.
8579 To avoid regressions such as link errors, return false now. */
8580 if (!val && !optimize)
8581 val = integer_zero_node;
8582
8583 return val;
8584 }
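
/* Illustrative behavior of the __builtin_constant_p handling above,
   assuming a non-constant variable x (hypothetical):

     int a = __builtin_constant_p (42);   // folds to 1
     int b = __builtin_constant_p (x);    // folds to 0 at -O0;
                                          // left for later passes with -O

   The early 0 at -O0 is the "return false now" case in the comment.  */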
8585
8586 case BUILT_IN_CLASSIFY_TYPE:
8587 return fold_builtin_classify_type (arg0);
8588
8589 case BUILT_IN_STRLEN:
8590 return fold_builtin_strlen (loc, type, arg0);
8591
8592 CASE_FLT_FN (BUILT_IN_FABS):
8593 case BUILT_IN_FABSD32:
8594 case BUILT_IN_FABSD64:
8595 case BUILT_IN_FABSD128:
8596 return fold_builtin_fabs (loc, arg0, type);
8597
8598 case BUILT_IN_ABS:
8599 case BUILT_IN_LABS:
8600 case BUILT_IN_LLABS:
8601 case BUILT_IN_IMAXABS:
8602 return fold_builtin_abs (loc, arg0, type);
8603
8604 CASE_FLT_FN (BUILT_IN_CONJ):
8605 if (validate_arg (arg0, COMPLEX_TYPE)
8606 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8607 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
8608 break;
8609
8610 CASE_FLT_FN (BUILT_IN_CREAL):
8611 if (validate_arg (arg0, COMPLEX_TYPE)
8612 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8613 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
8614 break;
8615
8616 CASE_FLT_FN (BUILT_IN_CIMAG):
8617 if (validate_arg (arg0, COMPLEX_TYPE)
8618 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8619 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
8620 break;
8621
8622 CASE_FLT_FN (BUILT_IN_CCOS):
8623 if (validate_arg (arg0, COMPLEX_TYPE)
8624 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8625 return do_mpc_arg1 (arg0, type, mpc_cos);
8626 break;
8627
8628 CASE_FLT_FN (BUILT_IN_CCOSH):
8629 if (validate_arg (arg0, COMPLEX_TYPE)
8630 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8631 return do_mpc_arg1 (arg0, type, mpc_cosh);
8632 break;
8633
8634 CASE_FLT_FN (BUILT_IN_CPROJ):
8635 if (TREE_CODE (arg0) == COMPLEX_CST
8636 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8637 {
8638 const REAL_VALUE_TYPE *real
8639 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
8640 const REAL_VALUE_TYPE *imag
8641 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
8642
8643 if (real_isinf (real) || real_isinf (imag))
8644 return build_complex_inf (type, imag->sign);
8645 else
8646 return arg0;
8647 }
8648 break;
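
/* Hedged sketch of the cproj constant folding above: finite complex
   constants are returned unchanged, while a constant with any infinite
   component projects to infinity, keeping the sign of the imaginary
   part:

     cproj (1.0 + 2.0*I)      ==> 1.0 + 2.0*I
     cproj (INFINITY - 3.0*I) ==> INFINITY - 0.0*I
*/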
8649
8650 CASE_FLT_FN (BUILT_IN_CSIN):
8651 if (validate_arg (arg0, COMPLEX_TYPE)
8652 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8653 return do_mpc_arg1 (arg0, type, mpc_sin);
8654 break;
8655
8656 CASE_FLT_FN (BUILT_IN_CSINH):
8657 if (validate_arg (arg0, COMPLEX_TYPE)
8658 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8659 return do_mpc_arg1 (arg0, type, mpc_sinh);
8660 break;
8661
8662 CASE_FLT_FN (BUILT_IN_CTAN):
8663 if (validate_arg (arg0, COMPLEX_TYPE)
8664 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8665 return do_mpc_arg1 (arg0, type, mpc_tan);
8666 break;
8667
8668 CASE_FLT_FN (BUILT_IN_CTANH):
8669 if (validate_arg (arg0, COMPLEX_TYPE)
8670 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8671 return do_mpc_arg1 (arg0, type, mpc_tanh);
8672 break;
8673
8674 CASE_FLT_FN (BUILT_IN_CLOG):
8675 if (validate_arg (arg0, COMPLEX_TYPE)
8676 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8677 return do_mpc_arg1 (arg0, type, mpc_log);
8678 break;
8679
8680 CASE_FLT_FN (BUILT_IN_CSQRT):
8681 if (validate_arg (arg0, COMPLEX_TYPE)
8682 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8683 return do_mpc_arg1 (arg0, type, mpc_sqrt);
8684 break;
8685
8686 CASE_FLT_FN (BUILT_IN_CASIN):
8687 if (validate_arg (arg0, COMPLEX_TYPE)
8688 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8689 return do_mpc_arg1 (arg0, type, mpc_asin);
8690 break;
8691
8692 CASE_FLT_FN (BUILT_IN_CACOS):
8693 if (validate_arg (arg0, COMPLEX_TYPE)
8694 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8695 return do_mpc_arg1 (arg0, type, mpc_acos);
8696 break;
8697
8698 CASE_FLT_FN (BUILT_IN_CATAN):
8699 if (validate_arg (arg0, COMPLEX_TYPE)
8700 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8701 return do_mpc_arg1 (arg0, type, mpc_atan);
8702 break;
8703
8704 CASE_FLT_FN (BUILT_IN_CASINH):
8705 if (validate_arg (arg0, COMPLEX_TYPE)
8706 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8707 return do_mpc_arg1 (arg0, type, mpc_asinh);
8708 break;
8709
8710 CASE_FLT_FN (BUILT_IN_CACOSH):
8711 if (validate_arg (arg0, COMPLEX_TYPE)
8712 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8713 return do_mpc_arg1 (arg0, type, mpc_acosh);
8714 break;
8715
8716 CASE_FLT_FN (BUILT_IN_CATANH):
8717 if (validate_arg (arg0, COMPLEX_TYPE)
8718 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8719 return do_mpc_arg1 (arg0, type, mpc_atanh);
8720 break;
8721
8722 CASE_FLT_FN (BUILT_IN_CABS):
8723 if (TREE_CODE (arg0) == COMPLEX_CST
8724 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8725 return do_mpfr_arg2 (TREE_REALPART (arg0), TREE_IMAGPART (arg0),
8726 type, mpfr_hypot);
8727 break;
8728
8729 CASE_FLT_FN (BUILT_IN_CARG):
8730 return fold_builtin_carg (loc, arg0, type);
8731
8732 CASE_FLT_FN (BUILT_IN_SQRT):
8733 if (validate_arg (arg0, REAL_TYPE))
8734 return do_mpfr_arg1 (arg0, type, mpfr_sqrt, &dconst0, NULL, true);
8735 break;
8736
8737 CASE_FLT_FN (BUILT_IN_CBRT):
8738 if (validate_arg (arg0, REAL_TYPE))
8739 return do_mpfr_arg1 (arg0, type, mpfr_cbrt, NULL, NULL, 0);
8740 break;
8741
8742 CASE_FLT_FN (BUILT_IN_ASIN):
8743 if (validate_arg (arg0, REAL_TYPE))
8744 return do_mpfr_arg1 (arg0, type, mpfr_asin,
8745 &dconstm1, &dconst1, true);
8746 break;
8747
8748 CASE_FLT_FN (BUILT_IN_ACOS):
8749 if (validate_arg (arg0, REAL_TYPE))
8750 return do_mpfr_arg1 (arg0, type, mpfr_acos,
8751 &dconstm1, &dconst1, true);
8752 break;
8753
8754 CASE_FLT_FN (BUILT_IN_ATAN):
8755 if (validate_arg (arg0, REAL_TYPE))
8756 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
8757 break;
8758
8759 CASE_FLT_FN (BUILT_IN_ASINH):
8760 if (validate_arg (arg0, REAL_TYPE))
8761 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
8762 break;
8763
8764 CASE_FLT_FN (BUILT_IN_ACOSH):
8765 if (validate_arg (arg0, REAL_TYPE))
8766 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
8767 &dconst1, NULL, true);
8768 break;
8769
8770 CASE_FLT_FN (BUILT_IN_ATANH):
8771 if (validate_arg (arg0, REAL_TYPE))
8772 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
8773 &dconstm1, &dconst1, false);
8774 break;
8775
8776 CASE_FLT_FN (BUILT_IN_SIN):
8777 if (validate_arg (arg0, REAL_TYPE))
8778 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
8779 break;
8780
8781 CASE_FLT_FN (BUILT_IN_COS):
8782 if (validate_arg (arg0, REAL_TYPE))
8783 return do_mpfr_arg1 (arg0, type, mpfr_cos, NULL, NULL, 0);
8784 break;
8785
8786 CASE_FLT_FN (BUILT_IN_TAN):
8787 if (validate_arg (arg0, REAL_TYPE))
8788 return do_mpfr_arg1 (arg0, type, mpfr_tan, NULL, NULL, 0);
8789 break;
8790
8791 CASE_FLT_FN (BUILT_IN_CEXP):
8792 if (validate_arg (arg0, COMPLEX_TYPE)
8793 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8794 return do_mpc_arg1 (arg0, type, mpc_exp);
8795 break;
8796
8797 CASE_FLT_FN (BUILT_IN_CEXPI):
8798 if (validate_arg (arg0, REAL_TYPE))
8799 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
8800 break;
8801
8802 CASE_FLT_FN (BUILT_IN_SINH):
8803 if (validate_arg (arg0, REAL_TYPE))
8804 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
8805 break;
8806
8807 CASE_FLT_FN (BUILT_IN_COSH):
8808 if (validate_arg (arg0, REAL_TYPE))
8809 return do_mpfr_arg1 (arg0, type, mpfr_cosh, NULL, NULL, 0);
8810 break;
8811
8812 CASE_FLT_FN (BUILT_IN_TANH):
8813 if (validate_arg (arg0, REAL_TYPE))
8814 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
8815 break;
8816
8817 CASE_FLT_FN (BUILT_IN_ERF):
8818 if (validate_arg (arg0, REAL_TYPE))
8819 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
8820 break;
8821
8822 CASE_FLT_FN (BUILT_IN_ERFC):
8823 if (validate_arg (arg0, REAL_TYPE))
8824 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
8825 break;
8826
8827 CASE_FLT_FN (BUILT_IN_TGAMMA):
8828 if (validate_arg (arg0, REAL_TYPE))
8829 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
8830 break;
8831
8832 CASE_FLT_FN (BUILT_IN_EXP):
8833 if (validate_arg (arg0, REAL_TYPE))
8834 return do_mpfr_arg1 (arg0, type, mpfr_exp, NULL, NULL, 0);
8835 break;
8836
8837 CASE_FLT_FN (BUILT_IN_EXP2):
8838 if (validate_arg (arg0, REAL_TYPE))
8839 return do_mpfr_arg1 (arg0, type, mpfr_exp2, NULL, NULL, 0);
8840 break;
8841
8842 CASE_FLT_FN (BUILT_IN_EXP10):
8843 CASE_FLT_FN (BUILT_IN_POW10):
8844 if (validate_arg (arg0, REAL_TYPE))
8845 return do_mpfr_arg1 (arg0, type, mpfr_exp10, NULL, NULL, 0);
8846 break;
8847
8848 CASE_FLT_FN (BUILT_IN_EXPM1):
8849 if (validate_arg (arg0, REAL_TYPE))
8850 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
8851 break;
8852
8853 CASE_FLT_FN (BUILT_IN_LOG):
8854 if (validate_arg (arg0, REAL_TYPE))
8855 return do_mpfr_arg1 (arg0, type, mpfr_log, &dconst0, NULL, false);
8856 break;
8857
8858 CASE_FLT_FN (BUILT_IN_LOG2):
8859 if (validate_arg (arg0, REAL_TYPE))
8860 return do_mpfr_arg1 (arg0, type, mpfr_log2, &dconst0, NULL, false);
8861 break;
8862
8863 CASE_FLT_FN (BUILT_IN_LOG10):
8864 if (validate_arg (arg0, REAL_TYPE))
8865 return do_mpfr_arg1 (arg0, type, mpfr_log10, &dconst0, NULL, false);
8866 break;
8867
8868 CASE_FLT_FN (BUILT_IN_LOG1P):
8869 if (validate_arg (arg0, REAL_TYPE))
8870 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
8871 &dconstm1, NULL, false);
8872 break;
8873
8874 CASE_FLT_FN (BUILT_IN_J0):
8875 if (validate_arg (arg0, REAL_TYPE))
8876 return do_mpfr_arg1 (arg0, type, mpfr_j0,
8877 NULL, NULL, 0);
8878 break;
8879
8880 CASE_FLT_FN (BUILT_IN_J1):
8881 if (validate_arg (arg0, REAL_TYPE))
8882 return do_mpfr_arg1 (arg0, type, mpfr_j1,
8883 NULL, NULL, 0);
8884 break;
8885
8886 CASE_FLT_FN (BUILT_IN_Y0):
8887 if (validate_arg (arg0, REAL_TYPE))
8888 return do_mpfr_arg1 (arg0, type, mpfr_y0,
8889 &dconst0, NULL, false);
8890 break;
8891
8892 CASE_FLT_FN (BUILT_IN_Y1):
8893 if (validate_arg (arg0, REAL_TYPE))
8894 return do_mpfr_arg1 (arg0, type, mpfr_y1,
8895 &dconst0, NULL, false);
8896 break;
8897
8898 CASE_FLT_FN (BUILT_IN_NAN):
8899 case BUILT_IN_NAND32:
8900 case BUILT_IN_NAND64:
8901 case BUILT_IN_NAND128:
8902 return fold_builtin_nan (arg0, type, true);
8903
8904 CASE_FLT_FN (BUILT_IN_NANS):
8905 return fold_builtin_nan (arg0, type, false);
8906
8907 CASE_FLT_FN (BUILT_IN_FLOOR):
8908 if (TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0))
8909 {
8910 REAL_VALUE_TYPE x = TREE_REAL_CST (arg0);
8911 if (!REAL_VALUE_ISNAN (x) || !flag_errno_math)
8912 {
8913 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8914 REAL_VALUE_TYPE r;
8915 real_floor (&r, TYPE_MODE (type), &x);
8916 return build_real (type, r);
8917 }
8918 }
8919 break;
8920
8921 CASE_FLT_FN (BUILT_IN_CEIL):
8922 if (TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0))
8923 {
8924 REAL_VALUE_TYPE x = TREE_REAL_CST (arg0);
8925 if (!REAL_VALUE_ISNAN (x) || !flag_errno_math)
8926 {
8927 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8928 REAL_VALUE_TYPE r;
8929 real_ceil (&r, TYPE_MODE (type), &x);
8930 return build_real (type, r);
8931 }
8932 }
8933 break;
8934
8935 CASE_FLT_FN (BUILT_IN_TRUNC):
8936 if (TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0))
8937 {
8938 REAL_VALUE_TYPE x = TREE_REAL_CST (arg0);
8939 REAL_VALUE_TYPE r;
8940 real_trunc (&r, TYPE_MODE (type), &x);
8941 return build_real (type, r);
8942 }
8943 break;
8944
8945 CASE_FLT_FN (BUILT_IN_ROUND):
8946 if (TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0))
8947 {
8948 REAL_VALUE_TYPE x = TREE_REAL_CST (arg0);
8949 if (!REAL_VALUE_ISNAN (x) || !flag_errno_math)
8950 {
8951 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8952 REAL_VALUE_TYPE r;
8953 real_round (&r, TYPE_MODE (type), &x);
8954 return build_real (type, r);
8955 }
8956 }
8957 break;
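
/* Quick reference for the four constant rounding folds above, assuming
   no NaN/errno complications (illustrative only):

     floor (2.5)  ==>  2.0        ceil (2.5)   ==>  3.0
     trunc (-2.5) ==> -2.0        round (-2.5) ==> -3.0

   round resolves halfway cases away from zero, unlike rint.  */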
8958
8959 CASE_FLT_FN (BUILT_IN_ICEIL):
8960 CASE_FLT_FN (BUILT_IN_LCEIL):
8961 CASE_FLT_FN (BUILT_IN_LLCEIL):
8962 return do_real_to_int_conversion (type, arg0, real_ceil);
8963
8964 CASE_FLT_FN (BUILT_IN_LFLOOR):
8965 CASE_FLT_FN (BUILT_IN_IFLOOR):
8966 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8967 return do_real_to_int_conversion (type, arg0, real_floor);
8968
8969 CASE_FLT_FN (BUILT_IN_IROUND):
8970 CASE_FLT_FN (BUILT_IN_LROUND):
8971 CASE_FLT_FN (BUILT_IN_LLROUND):
8972 return do_real_to_int_conversion (type, arg0, real_round);
8973
8974 CASE_FLT_FN (BUILT_IN_IRINT):
8975 CASE_FLT_FN (BUILT_IN_LRINT):
8976 CASE_FLT_FN (BUILT_IN_LLRINT):
8977 /* Not yet folded to a constant. */
8978 return NULL_TREE;
8979
8980 case BUILT_IN_BSWAP16:
8981 case BUILT_IN_BSWAP32:
8982 case BUILT_IN_BSWAP64:
8983 return fold_builtin_bswap (fndecl, arg0);
8984
8985 CASE_INT_FN (BUILT_IN_FFS):
8986 CASE_INT_FN (BUILT_IN_CLZ):
8987 CASE_INT_FN (BUILT_IN_CTZ):
8988 CASE_INT_FN (BUILT_IN_CLRSB):
8989 CASE_INT_FN (BUILT_IN_POPCOUNT):
8990 CASE_INT_FN (BUILT_IN_PARITY):
8991 return fold_builtin_bitop (fndecl, arg0);
8992
8993 CASE_FLT_FN (BUILT_IN_SIGNBIT):
8994 if (TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0))
8995 return (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0))
8996 ? build_one_cst (type)
8997 : build_zero_cst (type));
8998 break;
8999
9000 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
9001 return fold_const_builtin_significand (loc, arg0, type);
9002
9003 CASE_FLT_FN (BUILT_IN_ILOGB):
9004 CASE_FLT_FN (BUILT_IN_LOGB):
9005 return fold_const_builtin_logb (loc, arg0, type);
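
/* Hedged illustration of the constant folds routed through
   fold_const_builtin_significand and fold_const_builtin_logb above:
   for a finite nonzero x, x == significand (x) * 2**logb (x) with
   1 <= significand (x) < 2 (radix 2 assumed), e.g.

     logb (8.0)         ==> 3.0      ilogb (8.0)         ==> 3
     significand (8.0)  ==> 1.0      significand (12.0)  ==> 1.5
*/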
9006
9007 case BUILT_IN_ISASCII:
9008 return fold_builtin_isascii (loc, arg0);
9009
9010 case BUILT_IN_TOASCII:
9011 return fold_builtin_toascii (loc, arg0);
9012
9013 case BUILT_IN_ISDIGIT:
9014 return fold_builtin_isdigit (loc, arg0);
9015
9016 CASE_FLT_FN (BUILT_IN_FINITE):
9017 case BUILT_IN_FINITED32:
9018 case BUILT_IN_FINITED64:
9019 case BUILT_IN_FINITED128:
9020 case BUILT_IN_ISFINITE:
9021 {
9022 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
9023 if (ret)
9024 return ret;
9025 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9026 }
9027
9028 CASE_FLT_FN (BUILT_IN_ISINF):
9029 case BUILT_IN_ISINFD32:
9030 case BUILT_IN_ISINFD64:
9031 case BUILT_IN_ISINFD128:
9032 {
9033 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
9034 if (ret)
9035 return ret;
9036 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9037 }
9038
9039 case BUILT_IN_ISNORMAL:
9040 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9041
9042 case BUILT_IN_ISINF_SIGN:
9043 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
9044
9045 CASE_FLT_FN (BUILT_IN_ISNAN):
9046 case BUILT_IN_ISNAND32:
9047 case BUILT_IN_ISNAND64:
9048 case BUILT_IN_ISNAND128:
9049 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
9050
9051 case BUILT_IN_FREE:
9052 if (integer_zerop (arg0))
9053 return build_empty_stmt (loc);
9054 break;
9055
9056 default:
9057 break;
9058 }
9059
9060 return NULL_TREE;
9062 }
9063
9064 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
9065 This function returns NULL_TREE if no simplification was possible. */
9066
9067 static tree
9068 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
9069 {
9070 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9071 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9072
9073 switch (fcode)
9074 {
9075 CASE_FLT_FN (BUILT_IN_JN):
9076 if (validate_arg (arg0, INTEGER_TYPE)
9077 && validate_arg (arg1, REAL_TYPE))
9078 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
9079 break;
9080
9081 CASE_FLT_FN (BUILT_IN_YN):
9082 if (validate_arg (arg0, INTEGER_TYPE)
9083 && validate_arg (arg1, REAL_TYPE))
9084 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
9085 &dconst0, false);
9086 break;
9087
9088 CASE_FLT_FN (BUILT_IN_DREM):
9089 CASE_FLT_FN (BUILT_IN_REMAINDER):
9090 if (validate_arg (arg0, REAL_TYPE)
9091 && validate_arg (arg1, REAL_TYPE))
9092 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
9093 break;
9094
9095 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
9096 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
9097 if (validate_arg (arg0, REAL_TYPE)
9098 && validate_arg (arg1, POINTER_TYPE))
9099 return do_mpfr_lgamma_r (arg0, arg1, type);
9100 break;
9101
9102 CASE_FLT_FN (BUILT_IN_ATAN2):
9103 if (validate_arg (arg0, REAL_TYPE)
9104 && validate_arg (arg1, REAL_TYPE))
9105 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
9106 break;
9107
9108 CASE_FLT_FN (BUILT_IN_FDIM):
9109 if (validate_arg (arg0, REAL_TYPE)
9110 && validate_arg (arg1, REAL_TYPE))
9111 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
9112 break;
9113
9114 CASE_FLT_FN (BUILT_IN_HYPOT):
9115 if (validate_arg (arg0, REAL_TYPE)
9116 && validate_arg (arg1, REAL_TYPE))
9117 return do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot);
9118 break;
9119
9120 CASE_FLT_FN (BUILT_IN_CPOW):
9121 if (validate_arg (arg0, COMPLEX_TYPE)
9122 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
9123 && validate_arg (arg1, COMPLEX_TYPE)
9124 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
9125 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
9126 break;
9127
9128 CASE_FLT_FN (BUILT_IN_LDEXP):
9129 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
9130 CASE_FLT_FN (BUILT_IN_SCALBN):
9131 CASE_FLT_FN (BUILT_IN_SCALBLN):
9132 return fold_builtin_load_exponent (loc, arg0, arg1,
9133 type, /*ldexp=*/false);
9134
9135 CASE_FLT_FN (BUILT_IN_FREXP):
9136 return fold_builtin_frexp (loc, arg0, arg1, type);
9137
9138 CASE_FLT_FN (BUILT_IN_MODF):
9139 return fold_builtin_modf (loc, arg0, arg1, type);
9140
9141 case BUILT_IN_STRSTR:
9142 return fold_builtin_strstr (loc, arg0, arg1, type);
9143
9144 case BUILT_IN_STRSPN:
9145 return fold_builtin_strspn (loc, arg0, arg1);
9146
9147 case BUILT_IN_STRCSPN:
9148 return fold_builtin_strcspn (loc, arg0, arg1);
9149
9150 case BUILT_IN_STRCHR:
9151 case BUILT_IN_INDEX:
9152 return fold_builtin_strchr (loc, arg0, arg1, type);
9153
9154 case BUILT_IN_STRRCHR:
9155 case BUILT_IN_RINDEX:
9156 return fold_builtin_strrchr (loc, arg0, arg1, type);
9157
9158 case BUILT_IN_STRCMP:
9159 return fold_builtin_strcmp (loc, arg0, arg1);
9160
9161 case BUILT_IN_STRPBRK:
9162 return fold_builtin_strpbrk (loc, arg0, arg1, type);
9163
9164 case BUILT_IN_EXPECT:
9165 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
9166
9167 CASE_FLT_FN (BUILT_IN_POW):
9168 return fold_const_builtin_pow (arg0, arg1, type);
9169
9170 CASE_FLT_FN (BUILT_IN_POWI):
9171 if (TREE_CODE (arg0) == REAL_CST
9172 && !TREE_OVERFLOW (arg0)
9173 && tree_fits_shwi_p (arg1))
9174 {
9175 HOST_WIDE_INT c = tree_to_shwi (arg1);
9176 REAL_VALUE_TYPE x;
9177 real_powi (&x, TYPE_MODE (type), TREE_REAL_CST_PTR (arg0), c);
9178 return build_real (type, x);
9179 }
9180 break;
9181
9182 CASE_FLT_FN (BUILT_IN_COPYSIGN):
9183 if (TREE_CODE (arg0) == REAL_CST
9184 && TREE_CODE (arg1) == REAL_CST
9185 && !TREE_OVERFLOW (arg0)
9186 && !TREE_OVERFLOW (arg1))
9187 {
9188 REAL_VALUE_TYPE c1 = TREE_REAL_CST (arg0);
9189 real_copysign (&c1, TREE_REAL_CST_PTR (arg1));
9190 return build_real (type, c1);
9191 }
9192 break;
9193
9194 CASE_FLT_FN (BUILT_IN_FMIN):
9195 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9196 return do_mpfr_arg2 (arg0, arg1, type, mpfr_min);
9197 break;
9198
9199 CASE_FLT_FN (BUILT_IN_FMAX):
9200 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9201 return do_mpfr_arg2 (arg0, arg1, type, mpfr_max);
9202 break;
9203
9204 case BUILT_IN_ISGREATER:
9205 return fold_builtin_unordered_cmp (loc, fndecl,
9206 arg0, arg1, UNLE_EXPR, LE_EXPR);
9207 case BUILT_IN_ISGREATEREQUAL:
9208 return fold_builtin_unordered_cmp (loc, fndecl,
9209 arg0, arg1, UNLT_EXPR, LT_EXPR);
9210 case BUILT_IN_ISLESS:
9211 return fold_builtin_unordered_cmp (loc, fndecl,
9212 arg0, arg1, UNGE_EXPR, GE_EXPR);
9213 case BUILT_IN_ISLESSEQUAL:
9214 return fold_builtin_unordered_cmp (loc, fndecl,
9215 arg0, arg1, UNGT_EXPR, GT_EXPR);
9216 case BUILT_IN_ISLESSGREATER:
9217 return fold_builtin_unordered_cmp (loc, fndecl,
9218 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
9219 case BUILT_IN_ISUNORDERED:
9220 return fold_builtin_unordered_cmp (loc, fndecl,
9221 arg0, arg1, UNORDERED_EXPR,
9222 NOP_EXPR);
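
/* Illustrative shape of the fold_builtin_unordered_cmp results above:
   with NaNs honored, the ordered test is negated through its unordered
   counterpart, e.g. (pseudo-notation)

     isgreater (x, y)   ==>  ! (x unle y)    // ! UNLE_EXPR
     isunordered (x, y) ==>  x unord y       //   UNORDERED_EXPR

   and without NaNs the plain comparison (LE_EXPR etc.) is negated
   instead.  */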
9223
9224 /* We do the folding for va_start in the expander. */
9225 case BUILT_IN_VA_START:
9226 break;
9227
9228 case BUILT_IN_OBJECT_SIZE:
9229 return fold_builtin_object_size (arg0, arg1);
9230
9231 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
9232 return fold_builtin_atomic_always_lock_free (arg0, arg1);
9233
9234 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
9235 return fold_builtin_atomic_is_lock_free (arg0, arg1);
9236
9237 default:
9238 break;
9239 }
9240 return NULL_TREE;
9241 }
9242
9243 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
9244 and ARG2.
9245 This function returns NULL_TREE if no simplification was possible. */
9246
9247 static tree
9248 fold_builtin_3 (location_t loc, tree fndecl,
9249 tree arg0, tree arg1, tree arg2)
9250 {
9251 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9252 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9253 switch (fcode)
9254 {
9256 CASE_FLT_FN (BUILT_IN_SINCOS):
9257 return fold_builtin_sincos (loc, arg0, arg1, arg2);
9258
9259 CASE_FLT_FN (BUILT_IN_FMA):
9260 if (tree tem = fold_fma (loc, type, arg0, arg1, arg2))
9261 return tem;
9262 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
9263
9264 CASE_FLT_FN (BUILT_IN_REMQUO):
9265 if (validate_arg (arg0, REAL_TYPE)
9266 && validate_arg (arg1, REAL_TYPE)
9267 && validate_arg (arg2, POINTER_TYPE))
9268 return do_mpfr_remquo (arg0, arg1, arg2);
9269 break;
9270
9271 case BUILT_IN_STRNCMP:
9272 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
9273
9274 case BUILT_IN_MEMCHR:
9275 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
9276
9277 case BUILT_IN_BCMP:
9278 case BUILT_IN_MEMCMP:
9279 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
9280
9281 case BUILT_IN_EXPECT:
9282 return fold_builtin_expect (loc, arg0, arg1, arg2);
9283
9284 case BUILT_IN_ADD_OVERFLOW:
9285 case BUILT_IN_SUB_OVERFLOW:
9286 case BUILT_IN_MUL_OVERFLOW:
9287 case BUILT_IN_SADD_OVERFLOW:
9288 case BUILT_IN_SADDL_OVERFLOW:
9289 case BUILT_IN_SADDLL_OVERFLOW:
9290 case BUILT_IN_SSUB_OVERFLOW:
9291 case BUILT_IN_SSUBL_OVERFLOW:
9292 case BUILT_IN_SSUBLL_OVERFLOW:
9293 case BUILT_IN_SMUL_OVERFLOW:
9294 case BUILT_IN_SMULL_OVERFLOW:
9295 case BUILT_IN_SMULLL_OVERFLOW:
9296 case BUILT_IN_UADD_OVERFLOW:
9297 case BUILT_IN_UADDL_OVERFLOW:
9298 case BUILT_IN_UADDLL_OVERFLOW:
9299 case BUILT_IN_USUB_OVERFLOW:
9300 case BUILT_IN_USUBL_OVERFLOW:
9301 case BUILT_IN_USUBLL_OVERFLOW:
9302 case BUILT_IN_UMUL_OVERFLOW:
9303 case BUILT_IN_UMULL_OVERFLOW:
9304 case BUILT_IN_UMULLL_OVERFLOW:
9305 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
9306
9307 default:
9308 break;
9309 }
9310 return NULL_TREE;
9311 }
9312
9313 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
9314 arguments. The unnamed bool parameter (the historical IGNORE flag)
9315 is true if the result of the function call is ignored. This function
9316 returns NULL_TREE if no simplification was possible. */
9317
9318 tree
9319 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
9320 {
9321 tree ret = NULL_TREE;
9322
9323 switch (nargs)
9324 {
9325 case 0:
9326 ret = fold_builtin_0 (loc, fndecl);
9327 break;
9328 case 1:
9329 ret = fold_builtin_1 (loc, fndecl, args[0]);
9330 break;
9331 case 2:
9332 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
9333 break;
9334 case 3:
9335 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
9336 break;
9337 default:
9338 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
9339 break;
9340 }
9341 if (ret)
9342 {
9343 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9344 SET_EXPR_LOCATION (ret, loc);
9345 TREE_NO_WARNING (ret) = 1;
9346 return ret;
9347 }
9348 return NULL_TREE;
9349 }
9350
9351 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
9352 list ARGS along with N new arguments in NEWARGS. SKIP is the number
9353 of arguments in ARGS to be omitted. OLDNARGS is the number of
9354 elements in ARGS. */
9355
9356 static tree
9357 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
9358 int skip, tree fndecl, int n, va_list newargs)
9359 {
9360 int nargs = oldnargs - skip + n;
9361 tree *buffer;
9362
9363 if (n > 0)
9364 {
9365 int i, j;
9366
9367 buffer = XALLOCAVEC (tree, nargs);
9368 for (i = 0; i < n; i++)
9369 buffer[i] = va_arg (newargs, tree);
9370 for (j = skip; j < oldnargs; j++, i++)
9371 buffer[i] = args[j];
9372 }
9373 else
9374 buffer = args + skip;
9375
9376 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
9377 }
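
/* Hedged sketch of the argument surgery above: the N new arguments
   come first, followed by the old arguments starting at index SKIP.
   E.g. with OLDNARGS = 4, SKIP = 2 and one new argument A, the
   rewritten call receives [A, old2, old3].  Callers such as the *_chk
   folds use this to drop size/flag arguments when substituting an
   unchecked callee.  */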
9378
9379 /* Return true if FNDECL shouldn't be folded right now.
9380 If a built-in function has an inline attribute always_inline
9381 wrapper, defer folding it until after always_inline functions have
9382 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
9383 might not be performed. */
9384
9385 bool
9386 avoid_folding_inline_builtin (tree fndecl)
9387 {
9388 return (DECL_DECLARED_INLINE_P (fndecl)
9389 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
9390 && cfun
9391 && !cfun->always_inline_functions_inlined
9392 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
9393 }
9394
9395 /* A wrapper function for builtin folding that prevents warnings for
9396 "statement without effect" and the like, caused by removing the
9397 call node earlier than the warning is generated. */
9398
9399 tree
9400 fold_call_expr (location_t loc, tree exp, bool ignore)
9401 {
9402 tree ret = NULL_TREE;
9403 tree fndecl = get_callee_fndecl (exp);
9404 if (fndecl
9405 && TREE_CODE (fndecl) == FUNCTION_DECL
9406 && DECL_BUILT_IN (fndecl)
9407 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
9408 yet. Defer folding until we see all the arguments
9409 (after inlining). */
9410 && !CALL_EXPR_VA_ARG_PACK (exp))
9411 {
9412 int nargs = call_expr_nargs (exp);
9413
9414 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
9415 instead the last argument is __builtin_va_arg_pack (). Defer folding
9416 even in that case, until arguments are finalized. */
9417 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
9418 {
9419 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
9420 if (fndecl2
9421 && TREE_CODE (fndecl2) == FUNCTION_DECL
9422 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
9423 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
9424 return NULL_TREE;
9425 }
9426
9427 if (avoid_folding_inline_builtin (fndecl))
9428 return NULL_TREE;
9429
9430 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9431 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
9432 CALL_EXPR_ARGP (exp), ignore);
9433 else
9434 {
9435 tree *args = CALL_EXPR_ARGP (exp);
9436 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
9437 if (ret)
9438 return ret;
9439 }
9440 }
9441 return NULL_TREE;
9442 }
9443
9444 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
9445 N arguments are passed in the array ARGARRAY. Return a folded
9446 expression or NULL_TREE if no simplification was possible. */
9447
9448 tree
9449 fold_builtin_call_array (location_t loc, tree,
9450 tree fn,
9451 int n,
9452 tree *argarray)
9453 {
9454 if (TREE_CODE (fn) != ADDR_EXPR)
9455 return NULL_TREE;
9456
9457 tree fndecl = TREE_OPERAND (fn, 0);
9458 if (TREE_CODE (fndecl) == FUNCTION_DECL
9459 && DECL_BUILT_IN (fndecl))
9460 {
9461 /* If last argument is __builtin_va_arg_pack (), arguments to this
9462 function are not finalized yet. Defer folding until they are. */
9463 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
9464 {
9465 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
9466 if (fndecl2
9467 && TREE_CODE (fndecl2) == FUNCTION_DECL
9468 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
9469 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
9470 return NULL_TREE;
9471 }
9472 if (avoid_folding_inline_builtin (fndecl))
9473 return NULL_TREE;
9474 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9475 return targetm.fold_builtin (fndecl, n, argarray, false);
9476 else
9477 return fold_builtin_n (loc, fndecl, argarray, n, false);
9478 }
9479
9480 return NULL_TREE;
9481 }
9482
9483 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
9484 along with N new arguments specified as the "..." parameters. SKIP
9485 is the number of arguments in EXP to be omitted. This function is used
9486 to do varargs-to-varargs transformations. */
9487
9488 static tree
9489 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
9490 {
9491 va_list ap;
9492 tree t;
9493
9494 va_start (ap, n);
9495 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
9496 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
9497 va_end (ap);
9498
9499 return t;
9500 }
9501
9502 /* Validate a single argument ARG against a tree code CODE representing
9503 a type. */
9504
9505 static bool
9506 validate_arg (const_tree arg, enum tree_code code)
9507 {
9508 if (!arg)
9509 return false;
9510 else if (code == POINTER_TYPE)
9511 return POINTER_TYPE_P (TREE_TYPE (arg));
9512 else if (code == INTEGER_TYPE)
9513 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
9514 return code == TREE_CODE (TREE_TYPE (arg));
9515 }
9516
9517 /* This function validates the types of a function call argument list
9518 against a specified list of tree_codes. If the last specifier is a 0,
9519 that represents an ellipsis; otherwise the last specifier must be a
9520 VOID_TYPE.
9521
9522 This is the GIMPLE version of validate_arglist. Eventually we want to
9523 completely convert builtins.c to work on GIMPLE, and the tree-based
9524 validate_arglist will then be removed. */
9525
9526 bool
9527 validate_gimple_arglist (const gcall *call, ...)
9528 {
9529 enum tree_code code;
9530 bool res = false;
9531 va_list ap;
9532 const_tree arg;
9533 size_t i;
9534
9535 va_start (ap, call);
9536 i = 0;
9537
9538 do
9539 {
9540 code = (enum tree_code) va_arg (ap, int);
9541 switch (code)
9542 {
9543 case 0:
9544 /* This signifies an ellipsis; any further arguments are all OK. */
9545 res = true;
9546 goto end;
9547 case VOID_TYPE:
9548 /* This signifies an endlink; if no arguments remain, return
9549 true, otherwise return false. */
9550 res = (i == gimple_call_num_args (call));
9551 goto end;
9552 default:
9553 /* If no parameters remain or the parameter's code does not
9554 match the specified code, return false. Otherwise continue
9555 checking any remaining arguments. */
9556 arg = gimple_call_arg (call, i++);
9557 if (!validate_arg (arg, code))
9558 goto end;
9559 break;
9560 }
9561 }
9562 while (1);
9563
9564 /* We need gotos here since we can only have one va_end in a
9565 function. */
9566 end: ;
9567 va_end (ap);
9568
9569 return res;
9570 }
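
/* Hedged usage sketch for validate_gimple_arglist: the variadic codes
   spell out the expected signature, terminated by VOID_TYPE for an
   exact match or by 0 to accept any further arguments:

     validate_gimple_arglist (call, REAL_TYPE, POINTER_TYPE, VOID_TYPE)
       // exactly (real, pointer)
     validate_gimple_arglist (call, POINTER_TYPE, 0)
       // at least one pointer, anything after
*/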
9571
9572 /* Default target-specific builtin expander that does nothing. */
9573
9574 rtx
9575 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
9576 rtx target ATTRIBUTE_UNUSED,
9577 rtx subtarget ATTRIBUTE_UNUSED,
9578 machine_mode mode ATTRIBUTE_UNUSED,
9579 int ignore ATTRIBUTE_UNUSED)
9580 {
9581 return NULL_RTX;
9582 }
9583
9584 /* Returns true if EXP represents data that would potentially reside
9585 in a readonly section. */
9586
9587 bool
9588 readonly_data_expr (tree exp)
9589 {
9590 STRIP_NOPS (exp);
9591
9592 if (TREE_CODE (exp) != ADDR_EXPR)
9593 return false;
9594
9595 exp = get_base_address (TREE_OPERAND (exp, 0));
9596 if (!exp)
9597 return false;
9598
9599 /* Make sure we call decl_readonly_section only for trees it
9600 can handle (since it returns true for everything it doesn't
9601 understand). */
9602 if (TREE_CODE (exp) == STRING_CST
9603 || TREE_CODE (exp) == CONSTRUCTOR
9604 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
9605 return decl_readonly_section (exp, 0);
9606 else
9607 return false;
9608 }
9609
9610 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
9611 to the call, and TYPE is its return type.
9612
9613 Return NULL_TREE if no simplification was possible, otherwise return the
9614 simplified form of the call as a tree.
9615
9616 The simplified form may be a constant or other expression which
9617 computes the same value, but in a more efficient manner (including
9618 calls to other builtin functions).
9619
9620 The call may contain arguments which need to be evaluated, but
9621 which are not useful to determine the result of the call. In
9622 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9623 COMPOUND_EXPR will be an argument which must be evaluated.
9624 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9625 COMPOUND_EXPR in the chain will contain the tree for the simplified
9626 form of the builtin function call. */
9627
9628 static tree
9629 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
9630 {
9631 if (!validate_arg (s1, POINTER_TYPE)
9632 || !validate_arg (s2, POINTER_TYPE))
9633 return NULL_TREE;
9634 else
9635 {
9636 tree fn;
9637 const char *p1, *p2;
9638
9639 p2 = c_getstr (s2);
9640 if (p2 == NULL)
9641 return NULL_TREE;
9642
9643 p1 = c_getstr (s1);
9644 if (p1 != NULL)
9645 {
9646 const char *r = strstr (p1, p2);
9647 tree tem;
9648
9649 if (r == NULL)
9650 return build_int_cst (TREE_TYPE (s1), 0);
9651
9652 /* Return an offset into the constant string argument. */
9653 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
9654 return fold_convert_loc (loc, type, tem);
9655 }
9656
9657 /* The argument is const char *, and the result is char *, so we need
9658 a type conversion here to avoid a warning. */
9659 if (p2[0] == '\0')
9660 return fold_convert_loc (loc, type, s1);
9661
9662 if (p2[1] != '\0')
9663 return NULL_TREE;
9664
9665 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
9666 if (!fn)
9667 return NULL_TREE;
9668
9669 /* New argument list transforming strstr(s1, s2) to
9670 strchr(s1, s2[0]). */
9671 return build_call_expr_loc (loc, fn, 2, s1,
9672 build_int_cst (integer_type_node, p2[0]));
9673 }
9674 }
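
/* Illustrative summary of the strstr simplifications above:

     strstr ("haystack", "st") ==> "haystack" + 3   // constant offset
     strstr (s, "")            ==> (char *) s
     strstr (s, "c")           ==> strchr (s, 'c')

   Anything else is left to the library call.  */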
9675
9676 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
9677 the call, and TYPE is its return type.
9678
9679 Return NULL_TREE if no simplification was possible, otherwise return the
9680 simplified form of the call as a tree.
9681
9682 The simplified form may be a constant or other expression which
9683 computes the same value, but in a more efficient manner (including
9684 calls to other builtin functions).
9685
9686 The call may contain arguments which need to be evaluated, but
9687 which are not useful to determine the result of the call. In
9688 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9689 COMPOUND_EXPR will be an argument which must be evaluated.
9690 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9691 COMPOUND_EXPR in the chain will contain the tree for the simplified
9692 form of the builtin function call. */
9693
9694 static tree
9695 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
9696 {
9697 if (!validate_arg (s1, POINTER_TYPE)
9698 || !validate_arg (s2, INTEGER_TYPE))
9699 return NULL_TREE;
9700 else
9701 {
9702 const char *p1;
9703
9704 if (TREE_CODE (s2) != INTEGER_CST)
9705 return NULL_TREE;
9706
9707 p1 = c_getstr (s1);
9708 if (p1 != NULL)
9709 {
9710 char c;
9711 const char *r;
9712 tree tem;
9713
9714 if (target_char_cast (s2, &c))
9715 return NULL_TREE;
9716
9717 r = strchr (p1, c);
9718
9719 if (r == NULL)
9720 return build_int_cst (TREE_TYPE (s1), 0);
9721
9722 /* Return an offset into the constant string argument. */
9723 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
9724 return fold_convert_loc (loc, type, tem);
9725 }
9726 return NULL_TREE;
9727 }
9728 }
9729
9730 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
9731 the call, and TYPE is its return type.
9732
9733 Return NULL_TREE if no simplification was possible, otherwise return the
9734 simplified form of the call as a tree.
9735
9736 The simplified form may be a constant or other expression which
9737 computes the same value, but in a more efficient manner (including
9738 calls to other builtin functions).
9739
9740 The call may contain arguments which need to be evaluated, but
9741 which are not useful to determine the result of the call. In
9742 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9743 COMPOUND_EXPR will be an argument which must be evaluated.
9744 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9745 COMPOUND_EXPR in the chain will contain the tree for the simplified
9746 form of the builtin function call. */
9747
9748 static tree
9749 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
9750 {
9751 if (!validate_arg (s1, POINTER_TYPE)
9752 || !validate_arg (s2, INTEGER_TYPE))
9753 return NULL_TREE;
9754 else
9755 {
9756 tree fn;
9757 const char *p1;
9758
9759 if (TREE_CODE (s2) != INTEGER_CST)
9760 return NULL_TREE;
9761
9762 p1 = c_getstr (s1);
9763 if (p1 != NULL)
9764 {
9765 char c;
9766 const char *r;
9767 tree tem;
9768
9769 if (target_char_cast (s2, &c))
9770 return NULL_TREE;
9771
9772 r = strrchr (p1, c);
9773
9774 if (r == NULL)
9775 return build_int_cst (TREE_TYPE (s1), 0);
9776
9777 /* Return an offset into the constant string argument. */
9778 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
9779 return fold_convert_loc (loc, type, tem);
9780 }
9781
9782 if (! integer_zerop (s2))
9783 return NULL_TREE;
9784
9785 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
9786 if (!fn)
9787 return NULL_TREE;
9788
9789 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
9790 return build_call_expr_loc (loc, fn, 2, s1, s2);
9791 }
9792 }
9793
9794 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
9795 to the call, and TYPE is its return type.
9796
9797 Return NULL_TREE if no simplification was possible, otherwise return the
9798 simplified form of the call as a tree.
9799
9800 The simplified form may be a constant or other expression which
9801 computes the same value, but in a more efficient manner (including
9802 calls to other builtin functions).
9803
9804 The call may contain arguments which need to be evaluated, but
9805 which are not useful to determine the result of the call. In
9806 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9807 COMPOUND_EXPR will be an argument which must be evaluated.
9808 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9809 COMPOUND_EXPR in the chain will contain the tree for the simplified
9810 form of the builtin function call. */
9811
9812 static tree
9813 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
9814 {
9815 if (!validate_arg (s1, POINTER_TYPE)
9816 || !validate_arg (s2, POINTER_TYPE))
9817 return NULL_TREE;
9818 else
9819 {
9820 tree fn;
9821 const char *p1, *p2;
9822
9823 p2 = c_getstr (s2);
9824 if (p2 == NULL)
9825 return NULL_TREE;
9826
9827 p1 = c_getstr (s1);
9828 if (p1 != NULL)
9829 {
9830 const char *r = strpbrk (p1, p2);
9831 tree tem;
9832
9833 if (r == NULL)
9834 return build_int_cst (TREE_TYPE (s1), 0);
9835
9836 /* Return an offset into the constant string argument. */
9837 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
9838 return fold_convert_loc (loc, type, tem);
9839 }
9840
9841 if (p2[0] == '\0')
9842 /* strpbrk(x, "") == NULL.
9843 Evaluate and ignore s1 in case it had side-effects. */
9844 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
9845
9846 if (p2[1] != '\0')
9847 return NULL_TREE; /* Really call strpbrk. */
9848
9849 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
9850 if (!fn)
9851 return NULL_TREE;
9852
9853 /* New argument list transforming strpbrk(s1, s2) to
9854 strchr(s1, s2[0]). */
9855 return build_call_expr_loc (loc, fn, 2, s1,
9856 build_int_cst (integer_type_node, p2[0]));
9857 }
9858 }
9859
9860 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
9861 to the call.
9862
9863 Return NULL_TREE if no simplification was possible, otherwise return the
9864 simplified form of the call as a tree.
9865
9866 The simplified form may be a constant or other expression which
9867 computes the same value, but in a more efficient manner (including
9868 calls to other builtin functions).
9869
9870 The call may contain arguments which need to be evaluated, but
9871 which are not useful to determine the result of the call. In
9872 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9873 COMPOUND_EXPR will be an argument which must be evaluated.
9874 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9875 COMPOUND_EXPR in the chain will contain the tree for the simplified
9876 form of the builtin function call. */
9877
9878 static tree
9879 fold_builtin_strspn (location_t loc, tree s1, tree s2)
9880 {
9881 if (!validate_arg (s1, POINTER_TYPE)
9882 || !validate_arg (s2, POINTER_TYPE))
9883 return NULL_TREE;
9884 else
9885 {
9886 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
9887
9888 /* If both arguments are constants, evaluate at compile-time. */
9889 if (p1 && p2)
9890 {
9891 const size_t r = strspn (p1, p2);
9892 return build_int_cst (size_type_node, r);
9893 }
9894
9895 /* If either argument is "", the result is 0. */
9896 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
9897 /* Evaluate and ignore both arguments in case either one has
9898 side-effects. */
9899 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
9900 s1, s2);
9901 return NULL_TREE;
9902 }
9903 }
9904
9905 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
9906 to the call.
9907
9908 Return NULL_TREE if no simplification was possible, otherwise return the
9909 simplified form of the call as a tree.
9910
9911 The simplified form may be a constant or other expression which
9912 computes the same value, but in a more efficient manner (including
9913 calls to other builtin functions).
9914
9915 The call may contain arguments which need to be evaluated, but
9916 which are not useful to determine the result of the call. In
9917 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9918 COMPOUND_EXPR will be an argument which must be evaluated.
9919 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9920 COMPOUND_EXPR in the chain will contain the tree for the simplified
9921 form of the builtin function call. */
9922
9923 static tree
9924 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
9925 {
9926 if (!validate_arg (s1, POINTER_TYPE)
9927 || !validate_arg (s2, POINTER_TYPE))
9928 return NULL_TREE;
9929 else
9930 {
9931 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
9932
9933 /* If both arguments are constants, evaluate at compile-time. */
9934 if (p1 && p2)
9935 {
9936 const size_t r = strcspn (p1, p2);
9937 return build_int_cst (size_type_node, r);
9938 }
9939
9940 /* If the first argument is "", the result is 0. */
9941 if (p1 && *p1 == '\0')
9942 {
9943 /* Evaluate and ignore argument s2 in case it has
9944 side-effects. */
9945 return omit_one_operand_loc (loc, size_type_node,
9946 size_zero_node, s2);
9947 }
9948
9949 /* If the second argument is "", return __builtin_strlen(s1). */
9950 if (p2 && *p2 == '\0')
9951 {
9952 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
9953
9954 /* If the replacement _DECL isn't initialized, don't do the
9955 transformation. */
9956 if (!fn)
9957 return NULL_TREE;
9958
9959 return build_call_expr_loc (loc, fn, 1, s1);
9960 }
9961 return NULL_TREE;
9962 }
9963 }
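
/* Illustrative summary of the strspn/strcspn folds above:

     strspn ("abcba", "ab")  ==> 2        // both arguments constant
     strcspn ("abcba", "c")  ==> 2
     strspn (s, "")          ==> 0        // operands still evaluated
     strcspn ("", s)         ==> 0
     strcspn (s, "")         ==> strlen (s)
*/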
9964
9965 /* Fold the next_arg or va_start call EXP. Returns true if an error
9966 was produced, false otherwise. This is done so that we don't output
9967 the error or warning two or three times. */
9968
9969 bool
9970 fold_builtin_next_arg (tree exp, bool va_start_p)
9971 {
9972 tree fntype = TREE_TYPE (current_function_decl);
9973 int nargs = call_expr_nargs (exp);
9974 tree arg;
9975 /* There is a good chance the current input_location points inside the
9976 definition of the va_start macro (perhaps on the token for
9977 builtin) in a system header, so warnings will not be emitted.
9978 Use the location in real source code. */
9979 source_location current_location =
9980 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
9981 NULL);
9982
9983 if (!stdarg_p (fntype))
9984 {
9985 error ("%<va_start%> used in function with fixed args");
9986 return true;
9987 }
9988
9989 if (va_start_p)
9990 {
9991 if (nargs != 2)
9992 {
9993 error ("wrong number of arguments to function %<va_start%>");
9994 return true;
9995 }
9996 arg = CALL_EXPR_ARG (exp, 1);
9997 }
9998 /* We use __builtin_va_start (ap, 0) or __builtin_next_arg (0)
9999 when we checked the arguments and if needed issued a warning. */
10000 else
10001 {
10002 if (nargs == 0)
10003 {
10004 /* Evidently an out of date version of <stdarg.h>; can't validate
10005 va_start's second argument, but can still work as intended. */
10006 warning_at (current_location,
10007 OPT_Wvarargs,
10008 "%<__builtin_next_arg%> called without an argument");
10009 return true;
10010 }
10011 else if (nargs > 1)
10012 {
10013 error ("wrong number of arguments to function %<__builtin_next_arg%>");
10014 return true;
10015 }
10016 arg = CALL_EXPR_ARG (exp, 0);
10017 }
10018
10019 if (TREE_CODE (arg) == SSA_NAME)
10020 arg = SSA_NAME_VAR (arg);
10021
10022 /* We destructively modify the call to be __builtin_va_start (ap, 0)
10023 or __builtin_next_arg (0) the first time we see it, after checking
10024 the arguments and if needed issuing a warning. */
10025 if (!integer_zerop (arg))
10026 {
10027 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
10028
10029 /* Strip off all nops for the sake of the comparison. This
10030 is not quite the same as STRIP_NOPS. It does more.
10031 We must also strip off INDIRECT_REF for C++ reference
10032 parameters. */
10033 while (CONVERT_EXPR_P (arg)
10034 || TREE_CODE (arg) == INDIRECT_REF)
10035 arg = TREE_OPERAND (arg, 0);
10036 if (arg != last_parm)
10037 {
10038 /* FIXME: Sometimes with the tree optimizers we can end up with
10039 something other than the last argument even though the user
10040 used the last argument. We just warn and set the arg to be
10041 the last argument so that we will not generate wrong code
10042 because of it. */
10043 warning_at (current_location,
10044 OPT_Wvarargs,
10045 "second parameter of %<va_start%> not last named argument");
10046 }
10047
10048 /* Undefined by C99 7.15.1.4p4 (va_start):
10049 "If the parameter parmN is declared with the register storage
10050 class, with a function or array type, or with a type that is
10051 not compatible with the type that results after application of
10052 the default argument promotions, the behavior is undefined."
10053 */
10054 else if (DECL_REGISTER (arg))
10055 {
10056 warning_at (current_location,
10057 OPT_Wvarargs,
10058 "undefined behaviour when second parameter of "
10059 "%<va_start%> is declared with %<register%> storage");
10060 }
10061
10062 /* We want to verify the second parameter just once before the tree
10063 optimizers are run and then avoid keeping it in the tree,
10064 as otherwise we could warn even for correct code like:
10065 void foo (int i, ...)
10066 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
10067 if (va_start_p)
10068 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
10069 else
10070 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
10071 }
10072 return false;
10073 }
10074
10075
10076 /* Expand a call EXP to __builtin_object_size. */
10077
10078 static rtx
10079 expand_builtin_object_size (tree exp)
10080 {
10081 tree ost;
10082 int object_size_type;
10083 tree fndecl = get_callee_fndecl (exp);
10084
10085 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
10086 {
10087 error ("%Kfirst argument of %D must be a pointer, second integer constant",
10088 exp, fndecl);
10089 expand_builtin_trap ();
10090 return const0_rtx;
10091 }
10092
10093 ost = CALL_EXPR_ARG (exp, 1);
10094 STRIP_NOPS (ost);
10095
10096 if (TREE_CODE (ost) != INTEGER_CST
10097 || tree_int_cst_sgn (ost) < 0
10098 || compare_tree_int (ost, 3) > 0)
10099 {
10100 error ("%Klast argument of %D is not integer constant between 0 and 3",
10101 exp, fndecl);
10102 expand_builtin_trap ();
10103 return const0_rtx;
10104 }
10105
10106 object_size_type = tree_to_shwi (ost);
10107
10108 return object_size_type < 2 ? constm1_rtx : const0_rtx;
10109 }
10110
10111 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
10112 FCODE is the BUILT_IN_* to use.
10113 Return NULL_RTX if we failed; the caller should emit a normal call,
10114 otherwise try to get the result in TARGET, if convenient (and in
10115 mode MODE if that's convenient). */
10116
10117 static rtx
10118 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
10119 enum built_in_function fcode)
10120 {
10121 tree dest, src, len, size;
10122
10123 if (!validate_arglist (exp,
10124 POINTER_TYPE,
10125 fcode == BUILT_IN_MEMSET_CHK
10126 ? INTEGER_TYPE : POINTER_TYPE,
10127 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
10128 return NULL_RTX;
10129
10130 dest = CALL_EXPR_ARG (exp, 0);
10131 src = CALL_EXPR_ARG (exp, 1);
10132 len = CALL_EXPR_ARG (exp, 2);
10133 size = CALL_EXPR_ARG (exp, 3);
10134
10135 if (! tree_fits_uhwi_p (size))
10136 return NULL_RTX;
10137
10138 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
10139 {
10140 tree fn;
10141
10142 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
10143 {
10144 warning_at (tree_nonartificial_location (exp),
10145 0, "%Kcall to %D will always overflow destination buffer",
10146 exp, get_callee_fndecl (exp));
10147 return NULL_RTX;
10148 }
10149
10150 fn = NULL_TREE;
10151 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
10152 mem{cpy,pcpy,move,set} is available. */
10153 switch (fcode)
10154 {
10155 case BUILT_IN_MEMCPY_CHK:
10156 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
10157 break;
10158 case BUILT_IN_MEMPCPY_CHK:
10159 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
10160 break;
10161 case BUILT_IN_MEMMOVE_CHK:
10162 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
10163 break;
10164 case BUILT_IN_MEMSET_CHK:
10165 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
10166 break;
10167 default:
10168 break;
10169 }
10170
10171 if (! fn)
10172 return NULL_RTX;
10173
10174 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
10175 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10176 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10177 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10178 }
10179 else if (fcode == BUILT_IN_MEMSET_CHK)
10180 return NULL_RTX;
10181 else
10182 {
10183 unsigned int dest_align = get_pointer_alignment (dest);
10184
10185 /* If DEST is not a pointer type, call the normal function. */
10186 if (dest_align == 0)
10187 return NULL_RTX;
10188
10189 /* If SRC and DEST are the same (and not volatile), do nothing. */
10190 if (operand_equal_p (src, dest, 0))
10191 {
10192 tree expr;
10193
10194 if (fcode != BUILT_IN_MEMPCPY_CHK)
10195 {
10196 /* Evaluate and ignore LEN in case it has side-effects. */
10197 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
10198 return expand_expr (dest, target, mode, EXPAND_NORMAL);
10199 }
10200
10201 expr = fold_build_pointer_plus (dest, len);
10202 return expand_expr (expr, target, mode, EXPAND_NORMAL);
10203 }
10204
10205 /* __memmove_chk special case. */
10206 if (fcode == BUILT_IN_MEMMOVE_CHK)
10207 {
10208 unsigned int src_align = get_pointer_alignment (src);
10209
10210 if (src_align == 0)
10211 return NULL_RTX;
10212
10213 /* If src is categorized for a readonly section we can use
10214 normal __memcpy_chk. */
10215 if (readonly_data_expr (src))
10216 {
10217 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
10218 if (!fn)
10219 return NULL_RTX;
10220 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
10221 dest, src, len, size);
10222 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10223 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10224 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10225 }
10226 }
10227 return NULL_RTX;
10228 }
10229 }
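
/* Hedged summary of the expansion above for a checked call such as
   __builtin___memcpy_chk (dest, src, len, os):

     - os == (size_t) -1, or len and os constant with len <= os:
         emit a plain memcpy (dest, src, len);
     - len and os constant with len > os:
         warn "will always overflow" and keep the checking call;
     - otherwise: return NULL_RTX so the normal call is emitted,
       modulo the src == dest and read-only-src special cases above.  */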
10230
10231 /* Emit warning if a buffer overflow is detected at compile time. */
10232
10233 static void
10234 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
10235 {
10236 int is_strlen = 0;
10237 tree len, size;
10238 location_t loc = tree_nonartificial_location (exp);
10239
10240 switch (fcode)
10241 {
10242 case BUILT_IN_STRCPY_CHK:
10243 case BUILT_IN_STPCPY_CHK:
10244 /* For __strcat_chk the warning will be emitted only if overflowing
10245 by at least strlen (dest) + 1 bytes. */
10246 case BUILT_IN_STRCAT_CHK:
10247 len = CALL_EXPR_ARG (exp, 1);
10248 size = CALL_EXPR_ARG (exp, 2);
10249 is_strlen = 1;
10250 break;
10251 case BUILT_IN_STRNCAT_CHK:
10252 case BUILT_IN_STRNCPY_CHK:
10253 case BUILT_IN_STPNCPY_CHK:
10254 len = CALL_EXPR_ARG (exp, 2);
10255 size = CALL_EXPR_ARG (exp, 3);
10256 break;
10257 case BUILT_IN_SNPRINTF_CHK:
10258 case BUILT_IN_VSNPRINTF_CHK:
10259 len = CALL_EXPR_ARG (exp, 1);
10260 size = CALL_EXPR_ARG (exp, 3);
10261 break;
10262 default:
10263 gcc_unreachable ();
10264 }
10265
10266 if (!len || !size)
10267 return;
10268
10269 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
10270 return;
10271
10272 if (is_strlen)
10273 {
10274 len = c_strlen (len, 1);
10275 if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
10276 return;
10277 }
10278 else if (fcode == BUILT_IN_STRNCAT_CHK)
10279 {
10280 tree src = CALL_EXPR_ARG (exp, 1);
10281 if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
10282 return;
10283 src = c_strlen (src, 1);
10284 if (! src || ! tree_fits_uhwi_p (src))
10285 {
10286 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
10287 exp, get_callee_fndecl (exp));
10288 return;
10289 }
10290 else if (tree_int_cst_lt (src, size))
10291 return;
10292 }
10293 else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
10294 return;
10295
10296 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
10297 exp, get_callee_fndecl (exp));
10298 }
10299
10300 /* Emit warning if a buffer overflow is detected at compile time
10301 in __sprintf_chk/__vsprintf_chk calls. */
10302
10303 static void
10304 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
10305 {
10306 tree size, len, fmt;
10307 const char *fmt_str;
10308 int nargs = call_expr_nargs (exp);
10309
10310 /* Verify the required arguments in the original call. */
10311
10312 if (nargs < 4)
10313 return;
10314 size = CALL_EXPR_ARG (exp, 2);
10315 fmt = CALL_EXPR_ARG (exp, 3);
10316
10317 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
10318 return;
10319
10320 /* Check whether the format is a literal string constant. */
10321 fmt_str = c_getstr (fmt);
10322 if (fmt_str == NULL)
10323 return;
10324
10325 if (!init_target_chars ())
10326 return;
10327
10328 /* If the format doesn't contain % args or %%, we know its size. */
10329 if (strchr (fmt_str, target_percent) == 0)
10330 len = build_int_cstu (size_type_node, strlen (fmt_str));
10331 /* If the format is "%s" and first ... argument is a string literal,
10332 we know it too. */
10333 else if (fcode == BUILT_IN_SPRINTF_CHK
10334 && strcmp (fmt_str, target_percent_s) == 0)
10335 {
10336 tree arg;
10337
10338 if (nargs < 5)
10339 return;
10340 arg = CALL_EXPR_ARG (exp, 4);
10341 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
10342 return;
10343
10344 len = c_strlen (arg, 1);
10345 if (!len || ! tree_fits_uhwi_p (len))
10346 return;
10347 }
10348 else
10349 return;
10350
10351 if (! tree_int_cst_lt (len, size))
10352 warning_at (tree_nonartificial_location (exp),
10353 0, "%Kcall to %D will always overflow destination buffer",
10354 exp, get_callee_fndecl (exp));
10355 }
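
/* Illustrative trigger for the warning above, assuming
   -D_FORTIFY_SOURCE has turned sprintf into __sprintf_chk:

     char buf[4];
     sprintf (buf, "hello");       // len 5 >= size 4: always overflows
     sprintf (buf, "%s", "hey");   // len 3 <  size 4: no warning
*/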
10356
10357 /* Emit warning if a free is called with address of a variable. */
10358
10359 static void
10360 maybe_emit_free_warning (tree exp)
10361 {
10362 tree arg = CALL_EXPR_ARG (exp, 0);
10363
10364 STRIP_NOPS (arg);
10365 if (TREE_CODE (arg) != ADDR_EXPR)
10366 return;
10367
10368 arg = get_base_address (TREE_OPERAND (arg, 0));
10369 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
10370 return;
10371
10372 if (SSA_VAR_P (arg))
10373 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10374 "%Kattempt to free a non-heap object %qD", exp, arg);
10375 else
10376 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10377 "%Kattempt to free a non-heap object", exp);
10378 }
10379
10380 /* Fold a call to __builtin_object_size with arguments PTR and OST,
10381 if possible. */
10382
10383 static tree
10384 fold_builtin_object_size (tree ptr, tree ost)
10385 {
10386 unsigned HOST_WIDE_INT bytes;
10387 int object_size_type;
10388
10389 if (!validate_arg (ptr, POINTER_TYPE)
10390 || !validate_arg (ost, INTEGER_TYPE))
10391 return NULL_TREE;
10392
10393 STRIP_NOPS (ost);
10394
10395 if (TREE_CODE (ost) != INTEGER_CST
10396 || tree_int_cst_sgn (ost) < 0
10397 || compare_tree_int (ost, 3) > 0)
10398 return NULL_TREE;
10399
10400 object_size_type = tree_to_shwi (ost);
10401
10402 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
10403 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
10404 and (size_t) 0 for types 2 and 3. */
10405 if (TREE_SIDE_EFFECTS (ptr))
10406 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
10407
10408 if (TREE_CODE (ptr) == ADDR_EXPR)
10409 {
10410 bytes = compute_builtin_object_size (ptr, object_size_type);
10411 if (wi::fits_to_tree_p (bytes, size_type_node))
10412 return build_int_cstu (size_type_node, bytes);
10413 }
10414 else if (TREE_CODE (ptr) == SSA_NAME)
10415 {
10416 /* If the object size is not known yet, delay folding until
10417 later. Maybe subsequent passes will help determine
10418 it. */
10419 bytes = compute_builtin_object_size (ptr, object_size_type);
10420 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
10421 && wi::fits_to_tree_p (bytes, size_type_node))
10422 return build_int_cstu (size_type_node, bytes);
10423 }
10424
10425 return NULL_TREE;
10426 }
10427
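/* Illustrative sketch, not part of the original source: given

     char buf[64];
     size_t n = __builtin_object_size (buf, 0);

   the ADDR_EXPR branch above folds N to 64 at compile time, while an
   argument with side effects folds to (size_t) -1 for types 0 and 1
   and (size_t) 0 for types 2 and 3, as described above. */
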
10428 /* Builtins with folding operations that operate on "..." arguments
10429 need special handling; we need to store the arguments in a convenient
10430 data structure before attempting any folding. Fortunately there are
10431 only a few builtins that fall into this category. FNDECL is the
10432 function, EXP is the CALL_EXPR for the call. */
10433
10434 static tree
10435 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
10436 {
10437 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10438 tree ret = NULL_TREE;
10439
10440 switch (fcode)
10441 {
10442 case BUILT_IN_FPCLASSIFY:
10443 ret = fold_builtin_fpclassify (loc, args, nargs);
10444 break;
10445
10446 default:
10447 break;
10448 }
10449 if (ret)
10450 {
10451 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10452 SET_EXPR_LOCATION (ret, loc);
10453 TREE_NO_WARNING (ret) = 1;
10454 return ret;
10455 }
10456 return NULL_TREE;
10457 }
10458
10459 /* Initialize format string characters in the target charset. */
10460
10461 bool
10462 init_target_chars (void)
10463 {
10464 static bool init;
10465 if (!init)
10466 {
10467 target_newline = lang_hooks.to_target_charset ('\n');
10468 target_percent = lang_hooks.to_target_charset ('%');
10469 target_c = lang_hooks.to_target_charset ('c');
10470 target_s = lang_hooks.to_target_charset ('s');
10471 if (target_newline == 0 || target_percent == 0 || target_c == 0
10472 || target_s == 0)
10473 return false;
10474
10475 target_percent_c[0] = target_percent;
10476 target_percent_c[1] = target_c;
10477 target_percent_c[2] = '\0';
10478
10479 target_percent_s[0] = target_percent;
10480 target_percent_s[1] = target_s;
10481 target_percent_s[2] = '\0';
10482
10483 target_percent_s_newline[0] = target_percent;
10484 target_percent_s_newline[1] = target_s;
10485 target_percent_s_newline[2] = target_newline;
10486 target_percent_s_newline[3] = '\0';
10487
10488 init = true;
10489 }
10490 return true;
10491 }
10492
10493 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
10494 and no overflow/underflow occurred. INEXACT is true if M was not
10495 exactly calculated. TYPE is the tree type for the result. This
10496 function assumes the caller cleared the MPFR flags and then
10497 calculated M, so that any flag set on entry reflects that
10498 calculation. Return NULL_TREE if any checks fail. */
10499
10500 static tree
10501 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
10502 {
10503 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10504 overflow/underflow occurred. If -frounding-math, proceed iff the
10505 result of calling FUNC was exact. */
10506 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
10507 && (!flag_rounding_math || !inexact))
10508 {
10509 REAL_VALUE_TYPE rr;
10510
10511 real_from_mpfr (&rr, m, type, GMP_RNDN);
10512 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value;
10513 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10514 but the mpfr_t is not, then we underflowed in the
10515 conversion. */
10516 if (real_isfinite (&rr)
10517 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
10518 {
10519 REAL_VALUE_TYPE rmode;
10520
10521 real_convert (&rmode, TYPE_MODE (type), &rr);
10522 /* Proceed iff the specified mode can hold the value. */
10523 if (real_identical (&rmode, &rr))
10524 return build_real (type, rmode);
10525 }
10526 }
10527 return NULL_TREE;
10528 }
10529
10530 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
10531 number and no overflow/underflow occurred. INEXACT is true if M
10532 was not exactly calculated. TYPE is the tree type for the result.
10533 This function assumes the caller cleared the MPFR flags and then
10534 calculated M, so that any flag set on entry reflects that
10535 calculation. Return NULL_TREE if any checks fail; if
10536 FORCE_CONVERT is true, bypass the checks. */
10537
10538 static tree
10539 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
10540 {
10541 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10542 overflow/underflow occurred. If -frounding-math, proceed iff the
10543 result of calling FUNC was exact. */
10544 if (force_convert
10545 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
10546 && !mpfr_overflow_p () && !mpfr_underflow_p ()
10547 && (!flag_rounding_math || !inexact)))
10548 {
10549 REAL_VALUE_TYPE re, im;
10550
10551 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
10552 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
10553 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values;
10554 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10555 but the mpfr_t is not, then we underflowed in the
10556 conversion. */
10557 if (force_convert
10558 || (real_isfinite (&re) && real_isfinite (&im)
10559 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
10560 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
10561 {
10562 REAL_VALUE_TYPE re_mode, im_mode;
10563
10564 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
10565 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
10566 /* Proceed iff the specified mode can hold the value. */
10567 if (force_convert
10568 || (real_identical (&re_mode, &re)
10569 && real_identical (&im_mode, &im)))
10570 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
10571 build_real (TREE_TYPE (type), im_mode));
10572 }
10573 }
10574 return NULL_TREE;
10575 }
10576
10577 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
10578 FUNC on it and return the resulting value as a tree with type TYPE.
10579 If MIN and/or MAX are not NULL, then the supplied ARG must be
10580 within those bounds. If INCLUSIVE is true, then MIN/MAX are
10581 acceptable values, otherwise they are not. The mpfr precision is
10582 set to the precision of TYPE. We assume that function FUNC returns
10583 zero if the result could be calculated exactly within the requested
10584 precision. */
10585
10586 static tree
10587 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
10588 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
10589 bool inclusive)
10590 {
10591 tree result = NULL_TREE;
10592
10593 STRIP_NOPS (arg);
10594
10595 /* To proceed, MPFR must exactly represent the target floating point
10596 format, which only happens when the target base equals two. */
10597 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10598 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
10599 {
10600 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
10601
10602 if (real_isfinite (ra)
10603 && (!min || real_compare (inclusive ? GE_EXPR : GT_EXPR, ra, min))
10604 && (!max || real_compare (inclusive ? LE_EXPR : LT_EXPR, ra, max)))
10605 {
10606 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10607 const int prec = fmt->p;
10608 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
10609 int inexact;
10610 mpfr_t m;
10611
10612 mpfr_init2 (m, prec);
10613 mpfr_from_real (m, ra, GMP_RNDN);
10614 mpfr_clear_flags ();
10615 inexact = func (m, m, rnd);
10616 result = do_mpfr_ckconv (m, type, inexact);
10617 mpfr_clear (m);
10618 }
10619 }
10620
10621 return result;
10622 }
10623
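/* Illustrative sketch, not part of the original source: a caller
   folding a constant sin () argument might invoke

     do_mpfr_arg1 (arg, type, mpfr_sin, NULL, NULL, false);

   whereas sqrt (), defined only for non-negative inputs, would pass
   MIN = &dconst0 with INCLUSIVE = true to reject negative constants. */
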
10624 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
10625 FUNC on it and return the resulting value as a tree with type TYPE.
10626 The mpfr precision is set to the precision of TYPE. We assume that
10627 function FUNC returns zero if the result could be calculated
10628 exactly within the requested precision. */
10629
10630 static tree
10631 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
10632 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
10633 {
10634 tree result = NULL_TREE;
10635
10636 STRIP_NOPS (arg1);
10637 STRIP_NOPS (arg2);
10638
10639 /* To proceed, MPFR must exactly represent the target floating point
10640 format, which only happens when the target base equals two. */
10641 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10642 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
10643 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
10644 {
10645 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
10646 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
10647
10648 if (real_isfinite (ra1) && real_isfinite (ra2))
10649 {
10650 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10651 const int prec = fmt->p;
10652 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
10653 int inexact;
10654 mpfr_t m1, m2;
10655
10656 mpfr_inits2 (prec, m1, m2, NULL);
10657 mpfr_from_real (m1, ra1, GMP_RNDN);
10658 mpfr_from_real (m2, ra2, GMP_RNDN);
10659 mpfr_clear_flags ();
10660 inexact = func (m1, m1, m2, rnd);
10661 result = do_mpfr_ckconv (m1, type, inexact);
10662 mpfr_clears (m1, m2, NULL);
10663 }
10664 }
10665
10666 return result;
10667 }
10668
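/* Illustrative sketch, not part of the original source: folding a
   call with two constant arguments such as atan2 (1.0, 2.0) might use

     do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);

   which evaluates the result once in MPFR at the precision of TYPE. */
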
10669 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
10670 FUNC on it and return the resulting value as a tree with type TYPE.
10671 The mpfr precision is set to the precision of TYPE. We assume that
10672 function FUNC returns zero if the result could be calculated
10673 exactly within the requested precision. */
10674
10675 static tree
10676 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
10677 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
10678 {
10679 tree result = NULL_TREE;
10680
10681 STRIP_NOPS (arg1);
10682 STRIP_NOPS (arg2);
10683 STRIP_NOPS (arg3);
10684
10685 /* To proceed, MPFR must exactly represent the target floating point
10686 format, which only happens when the target base equals two. */
10687 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10688 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
10689 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
10690 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
10691 {
10692 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
10693 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
10694 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
10695
10696 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
10697 {
10698 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10699 const int prec = fmt->p;
10700 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
10701 int inexact;
10702 mpfr_t m1, m2, m3;
10703
10704 mpfr_inits2 (prec, m1, m2, m3, NULL);
10705 mpfr_from_real (m1, ra1, GMP_RNDN);
10706 mpfr_from_real (m2, ra2, GMP_RNDN);
10707 mpfr_from_real (m3, ra3, GMP_RNDN);
10708 mpfr_clear_flags ();
10709 inexact = func (m1, m1, m2, m3, rnd);
10710 result = do_mpfr_ckconv (m1, type, inexact);
10711 mpfr_clears (m1, m2, m3, NULL);
10712 }
10713 }
10714
10715 return result;
10716 }
10717
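/* Illustrative sketch, not part of the original source: the natural
   client here is fma () folding, roughly

     do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);

   so a constant fused multiply-add is computed exactly in MPFR and
   rounded to TYPE only once. */
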
10718 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
10719 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
10720 If ARG_SINP and ARG_COSP are NULL then the result is returned
10721 as a complex value.
10722 The type is taken from the type of ARG and is used for setting the
10723 precision of the calculation and results. */
10724
10725 static tree
10726 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
10727 {
10728 tree const type = TREE_TYPE (arg);
10729 tree result = NULL_TREE;
10730
10731 STRIP_NOPS (arg);
10732
10733 /* To proceed, MPFR must exactly represent the target floating point
10734 format, which only happens when the target base equals two. */
10735 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10736 && TREE_CODE (arg) == REAL_CST
10737 && !TREE_OVERFLOW (arg))
10738 {
10739 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
10740
10741 if (real_isfinite (ra))
10742 {
10743 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10744 const int prec = fmt->p;
10745 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
10746 tree result_s, result_c;
10747 int inexact;
10748 mpfr_t m, ms, mc;
10749
10750 mpfr_inits2 (prec, m, ms, mc, NULL);
10751 mpfr_from_real (m, ra, GMP_RNDN);
10752 mpfr_clear_flags ();
10753 inexact = mpfr_sin_cos (ms, mc, m, rnd);
10754 result_s = do_mpfr_ckconv (ms, type, inexact);
10755 result_c = do_mpfr_ckconv (mc, type, inexact);
10756 mpfr_clears (m, ms, mc, NULL);
10757 if (result_s && result_c)
10758 {
10759 /* If we are to return the result as a complex value, do so. */
10760 if (!arg_sinp && !arg_cosp)
10761 return build_complex (build_complex_type (type),
10762 result_c, result_s);
10763
10764 /* Dereference the sin/cos pointer arguments. */
10765 arg_sinp = build_fold_indirect_ref (arg_sinp);
10766 arg_cosp = build_fold_indirect_ref (arg_cosp);
10767 /* Proceed iff valid pointer types were passed in. */
10768 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
10769 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
10770 {
10771 /* Set the values. */
10772 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
10773 result_s);
10774 TREE_SIDE_EFFECTS (result_s) = 1;
10775 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
10776 result_c);
10777 TREE_SIDE_EFFECTS (result_c) = 1;
10778 /* Combine the assignments into a compound expr. */
10779 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10780 result_s, result_c));
10781 }
10782 }
10783 }
10784 }
10785 return result;
10786 }
10787
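/* Illustrative note, not part of the original source: for
   sincos (x, &s, &c) the two pointer arguments receive the values via
   the MODIFY_EXPRs built above, whereas a cexpi-style caller passes
   NULL pointers and gets back cos (x) + sin (x) * I as one complex
   constant. */
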
10788 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
10789 two-argument mpfr order N Bessel function FUNC on them and return
10790 the resulting value as a tree with type TYPE. The mpfr precision
10791 is set to the precision of TYPE. We assume that function FUNC
10792 returns zero if the result could be calculated exactly within the
10793 requested precision. */
10794 static tree
10795 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
10796 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
10797 const REAL_VALUE_TYPE *min, bool inclusive)
10798 {
10799 tree result = NULL_TREE;
10800
10801 STRIP_NOPS (arg1);
10802 STRIP_NOPS (arg2);
10803
10804 /* To proceed, MPFR must exactly represent the target floating point
10805 format, which only happens when the target base equals two. */
10806 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10807 && tree_fits_shwi_p (arg1)
10808 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
10809 {
10810 const HOST_WIDE_INT n = tree_to_shwi (arg1);
10811 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
10812
10813 if (n == (long)n
10814 && real_isfinite (ra)
10815 && (!min || real_compare (inclusive ? GE_EXPR : GT_EXPR, ra, min)))
10816 {
10817 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10818 const int prec = fmt->p;
10819 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
10820 int inexact;
10821 mpfr_t m;
10822
10823 mpfr_init2 (m, prec);
10824 mpfr_from_real (m, ra, GMP_RNDN);
10825 mpfr_clear_flags ();
10826 inexact = func (m, n, m, rnd);
10827 result = do_mpfr_ckconv (m, type, inexact);
10828 mpfr_clear (m);
10829 }
10830 }
10831
10832 return result;
10833 }
10834
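/* Illustrative sketch, not part of the original source: jn (n, x)
   folding might call

     do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, false);

   while yn (n, x), defined only for positive X, would additionally
   pass MIN = &dconst0 with INCLUSIVE = false. */
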
10835 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
10836 the pointer *(ARG_QUO) and return the result. The type is taken
10837 from the type of ARG0 and is used for setting the precision of the
10838 calculation and results. */
10839
10840 static tree
10841 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
10842 {
10843 tree const type = TREE_TYPE (arg0);
10844 tree result = NULL_TREE;
10845
10846 STRIP_NOPS (arg0);
10847 STRIP_NOPS (arg1);
10848
10849 /* To proceed, MPFR must exactly represent the target floating point
10850 format, which only happens when the target base equals two. */
10851 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10852 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
10853 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
10854 {
10855 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
10856 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
10857
10858 if (real_isfinite (ra0) && real_isfinite (ra1))
10859 {
10860 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10861 const int prec = fmt->p;
10862 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
10863 tree result_rem;
10864 long integer_quo;
10865 mpfr_t m0, m1;
10866
10867 mpfr_inits2 (prec, m0, m1, NULL);
10868 mpfr_from_real (m0, ra0, GMP_RNDN);
10869 mpfr_from_real (m1, ra1, GMP_RNDN);
10870 mpfr_clear_flags ();
10871 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
10872 /* Remquo is independent of the rounding mode, so pass
10873 inexact=0 to do_mpfr_ckconv(). */
10874 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
10875 mpfr_clears (m0, m1, NULL);
10876 if (result_rem)
10877 {
10878 /* MPFR calculates quo in the host's long so it may
10879 return more bits in quo than the target int can hold
10880 if sizeof(host long) > sizeof(target int). This can
10881 happen even for native compilers in LP64 mode. In
10882 these cases, reduce the quo value modulo the largest
10883 number that the target int can hold, leaving one
10884 bit for the sign. */
10885 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
10886 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
10887
10888 /* Dereference the quo pointer argument. */
10889 arg_quo = build_fold_indirect_ref (arg_quo);
10890 /* Proceed iff a valid pointer type was passed in. */
10891 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
10892 {
10893 /* Set the value. */
10894 tree result_quo
10895 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
10896 build_int_cst (TREE_TYPE (arg_quo),
10897 integer_quo));
10898 TREE_SIDE_EFFECTS (result_quo) = 1;
10899 /* Combine the quo assignment with the rem. */
10900 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10901 result_quo, result_rem));
10902 }
10903 }
10904 }
10905 }
10906 return result;
10907 }
10908
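/* Illustrative worked example, not part of the original source:
   remquo (5.0, 3.0, &q) folds to a remainder of -1.0 with Q set to 2,
   since the quotient 5/3 rounds to the nearest integer 2 and
   5.0 - 2 * 3.0 == -1.0. */
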
10909 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
10910 resulting value as a tree with type TYPE. The mpfr precision is
10911 set to the precision of TYPE. We assume that this mpfr function
10912 returns zero if the result could be calculated exactly within the
10913 requested precision. In addition, the integer pointer represented
10914 by ARG_SG will be dereferenced and set to the appropriate signgam
10915 (-1,1) value. */
10916
10917 static tree
10918 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
10919 {
10920 tree result = NULL_TREE;
10921
10922 STRIP_NOPS (arg);
10923
10924 /* To proceed, MPFR must exactly represent the target floating point
10925 format, which only happens when the target base equals two. Also
10926 verify ARG is a constant and that ARG_SG is an int pointer. */
10927 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10928 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
10929 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
10930 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
10931 {
10932 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
10933
10934 /* In addition to NaN and Inf, the argument cannot be zero or a
10935 negative integer. */
10936 if (real_isfinite (ra)
10937 && ra->cl != rvc_zero
10938 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
10939 {
10940 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10941 const int prec = fmt->p;
10942 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
10943 int inexact, sg;
10944 mpfr_t m;
10945 tree result_lg;
10946
10947 mpfr_init2 (m, prec);
10948 mpfr_from_real (m, ra, GMP_RNDN);
10949 mpfr_clear_flags ();
10950 inexact = mpfr_lgamma (m, &sg, m, rnd);
10951 result_lg = do_mpfr_ckconv (m, type, inexact);
10952 mpfr_clear (m);
10953 if (result_lg)
10954 {
10955 tree result_sg;
10956
10957 /* Dereference the arg_sg pointer argument. */
10958 arg_sg = build_fold_indirect_ref (arg_sg);
10959 /* Assign the signgam value into *arg_sg. */
10960 result_sg = fold_build2 (MODIFY_EXPR,
10961 TREE_TYPE (arg_sg), arg_sg,
10962 build_int_cst (TREE_TYPE (arg_sg), sg));
10963 TREE_SIDE_EFFECTS (result_sg) = 1;
10964 /* Combine the signgam assignment with the lgamma result. */
10965 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10966 result_sg, result_lg));
10967 }
10968 }
10969 }
10970
10971 return result;
10972 }
10973
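/* Illustrative worked example, not part of the original source: for
   lgamma_r (0.5, &sg) the gamma value is sqrt (pi) > 0, so SG is set
   to 1 and the result folds to log (sqrt (pi)), about 0.5724; a
   negative non-integer argument whose gamma value is negative would
   set SG to -1 instead. */
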
10974 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
10975 function FUNC on it and return the resulting value as a tree with
10976 type TYPE. The mpfr precision is set to the precision of TYPE. We
10977 assume that function FUNC returns zero if the result could be
10978 calculated exactly within the requested precision. */
10979
10980 static tree
10981 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
10982 {
10983 tree result = NULL_TREE;
10984
10985 STRIP_NOPS (arg);
10986
10987 /* To proceed, MPFR must exactly represent the target floating point
10988 format, which only happens when the target base equals two. */
10989 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
10990 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
10991 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
10992 {
10993 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
10994 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
10995
10996 if (real_isfinite (re) && real_isfinite (im))
10997 {
10998 const struct real_format *const fmt =
10999 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
11000 const int prec = fmt->p;
11001 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
11002 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
11003 int inexact;
11004 mpc_t m;
11005
11006 mpc_init2 (m, prec);
11007 mpfr_from_real (mpc_realref (m), re, rnd);
11008 mpfr_from_real (mpc_imagref (m), im, rnd);
11009 mpfr_clear_flags ();
11010 inexact = func (m, m, crnd);
11011 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
11012 mpc_clear (m);
11013 }
11014 }
11015
11016 return result;
11017 }
11018
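/* Illustrative sketch, not part of the original source: folding a
   constant csin () argument might look like

     do_mpc_arg1 (arg, type, mpc_sin);

   with MPC evaluating the complex sine once at the precision of the
   component type of TYPE. */
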
11019 /* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
11020 mpc function FUNC on them and return the resulting value as a tree
11021 with type TYPE. The mpfr precision is set to the precision of
11022 TYPE. We assume that function FUNC returns zero if the result
11023 could be calculated exactly within the requested precision. If
11024 DO_NONFINITE is true, then fold expressions containing Inf or NaN
11025 in the arguments and/or results. */
11026
11027 tree
11028 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
11029 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
11030 {
11031 tree result = NULL_TREE;
11032
11033 STRIP_NOPS (arg0);
11034 STRIP_NOPS (arg1);
11035
11036 /* To proceed, MPFR must exactly represent the target floating point
11037 format, which only happens when the target base equals two. */
11038 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
11039 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
11040 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
11041 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
11042 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
11043 {
11044 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
11045 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
11046 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
11047 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
11048
11049 if (do_nonfinite
11050 || (real_isfinite (re0) && real_isfinite (im0)
11051 && real_isfinite (re1) && real_isfinite (im1)))
11052 {
11053 const struct real_format *const fmt =
11054 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
11055 const int prec = fmt->p;
11056 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
11057 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
11058 int inexact;
11059 mpc_t m0, m1;
11060
11061 mpc_init2 (m0, prec);
11062 mpc_init2 (m1, prec);
11063 mpfr_from_real (mpc_realref (m0), re0, rnd);
11064 mpfr_from_real (mpc_imagref (m0), im0, rnd);
11065 mpfr_from_real (mpc_realref (m1), re1, rnd);
11066 mpfr_from_real (mpc_imagref (m1), im1, rnd);
11067 mpfr_clear_flags ();
11068 inexact = func (m0, m0, m1, crnd);
11069 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
11070 mpc_clear (m0);
11071 mpc_clear (m1);
11072 }
11073 }
11074
11075 return result;
11076 }
11077
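/* Illustrative sketch, not part of the original source: cpow ()
   folding might be

     do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);

   while a caller that must fold Inf/NaN operands anyway (e.g. when
   folding initializers) passes a nonzero DO_NONFINITE. */
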
11078 /* A wrapper function for builtin folding that prevents warnings for
11079 "statement without effect" and the like, caused by removing the
11080 call node before the warning is generated. */
11081
11082 tree
11083 fold_call_stmt (gcall *stmt, bool ignore)
11084 {
11085 tree ret = NULL_TREE;
11086 tree fndecl = gimple_call_fndecl (stmt);
11087 location_t loc = gimple_location (stmt);
11088 if (fndecl
11089 && TREE_CODE (fndecl) == FUNCTION_DECL
11090 && DECL_BUILT_IN (fndecl)
11091 && !gimple_call_va_arg_pack_p (stmt))
11092 {
11093 int nargs = gimple_call_num_args (stmt);
11094 tree *args = (nargs > 0
11095 ? gimple_call_arg_ptr (stmt, 0)
11096 : &error_mark_node);
11097
11098 if (avoid_folding_inline_builtin (fndecl))
11099 return NULL_TREE;
11100 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11101 {
11102 return targetm.fold_builtin (fndecl, nargs, args, ignore);
11103 }
11104 else
11105 {
11106 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
11107 if (ret)
11108 {
11109 /* Propagate location information from original call to
11110 expansion of builtin. Otherwise things like
11111 maybe_emit_chk_warning, that operate on the expansion
11112 of a builtin, will use the wrong location information. */
11113 if (gimple_has_location (stmt))
11114 {
11115 tree realret = ret;
11116 if (TREE_CODE (ret) == NOP_EXPR)
11117 realret = TREE_OPERAND (ret, 0);
11118 if (CAN_HAVE_LOCATION_P (realret)
11119 && !EXPR_HAS_LOCATION (realret))
11120 SET_EXPR_LOCATION (realret, loc);
11121 return realret;
11122 }
11123 return ret;
11124 }
11125 }
11126 }
11127 return NULL_TREE;
11128 }
11129
11130 /* Look up the function in builtin_decl that corresponds to DECL
11131 and set ASMSPEC as its user assembler name. DECL must be a
11132 function decl that declares a builtin. */
11133
11134 void
11135 set_builtin_user_assembler_name (tree decl, const char *asmspec)
11136 {
11137 tree builtin;
11138 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
11139 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
11140 && asmspec != 0);
11141
11142 builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
11143 set_user_assembler_name (builtin, asmspec);
11144 switch (DECL_FUNCTION_CODE (decl))
11145 {
11146 case BUILT_IN_MEMCPY:
11147 init_block_move_fn (asmspec);
11148 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
11149 break;
11150 case BUILT_IN_MEMSET:
11151 init_block_clear_fn (asmspec);
11152 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
11153 break;
11154 case BUILT_IN_MEMMOVE:
11155 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
11156 break;
11157 case BUILT_IN_MEMCMP:
11158 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
11159 break;
11160 case BUILT_IN_ABORT:
11161 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
11162 break;
11163 case BUILT_IN_FFS:
11164 if (INT_TYPE_SIZE < BITS_PER_WORD)
11165 {
11166 set_user_assembler_libfunc ("ffs", asmspec);
11167 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
11168 MODE_INT, 0), "ffs");
11169 }
11170 break;
11171 default:
11172 break;
11173 }
11174 }
11175
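/* Illustrative sketch, not part of the original source: a user
   declaration such as

     extern void *memcpy (void *, const void *, __SIZE_TYPE__)
       __asm__ ("my_memcpy");

   (with a hypothetical name) routes through the BUILT_IN_MEMCPY case
   above, so open-coded block moves and the memcpy libfunc both end up
   calling my_memcpy. */
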
11176 /* Return true if DECL is a builtin that expands to a constant or similarly
11177 simple code. */
11178 bool
11179 is_simple_builtin (tree decl)
11180 {
11181 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
11182 switch (DECL_FUNCTION_CODE (decl))
11183 {
11184 /* Builtins that expand to constants. */
11185 case BUILT_IN_CONSTANT_P:
11186 case BUILT_IN_EXPECT:
11187 case BUILT_IN_OBJECT_SIZE:
11188 case BUILT_IN_UNREACHABLE:
11189 /* Simple register moves or loads from stack. */
11190 case BUILT_IN_ASSUME_ALIGNED:
11191 case BUILT_IN_RETURN_ADDRESS:
11192 case BUILT_IN_EXTRACT_RETURN_ADDR:
11193 case BUILT_IN_FROB_RETURN_ADDR:
11194 case BUILT_IN_RETURN:
11195 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
11196 case BUILT_IN_FRAME_ADDRESS:
11197 case BUILT_IN_VA_END:
11198 case BUILT_IN_STACK_SAVE:
11199 case BUILT_IN_STACK_RESTORE:
11200 /* Exception state returns or moves registers around. */
11201 case BUILT_IN_EH_FILTER:
11202 case BUILT_IN_EH_POINTER:
11203 case BUILT_IN_EH_COPY_VALUES:
11204 return true;
11205
11206 default:
11207 return false;
11208 }
11209
11210 return false;
11211 }
11212
11213 /* Return true if DECL is a builtin that is not expensive, i.e., one that
11214 is most probably expanded inline into reasonably simple code. This is a
11215 superset of is_simple_builtin. */
11216 bool
11217 is_inexpensive_builtin (tree decl)
11218 {
11219 if (!decl)
11220 return false;
11221 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
11222 return true;
11223 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
11224 switch (DECL_FUNCTION_CODE (decl))
11225 {
11226 case BUILT_IN_ABS:
11227 case BUILT_IN_ALLOCA:
11228 case BUILT_IN_ALLOCA_WITH_ALIGN:
11229 case BUILT_IN_BSWAP16:
11230 case BUILT_IN_BSWAP32:
11231 case BUILT_IN_BSWAP64:
11232 case BUILT_IN_CLZ:
11233 case BUILT_IN_CLZIMAX:
11234 case BUILT_IN_CLZL:
11235 case BUILT_IN_CLZLL:
11236 case BUILT_IN_CTZ:
11237 case BUILT_IN_CTZIMAX:
11238 case BUILT_IN_CTZL:
11239 case BUILT_IN_CTZLL:
11240 case BUILT_IN_FFS:
11241 case BUILT_IN_FFSIMAX:
11242 case BUILT_IN_FFSL:
11243 case BUILT_IN_FFSLL:
11244 case BUILT_IN_IMAXABS:
11245 case BUILT_IN_FINITE:
11246 case BUILT_IN_FINITEF:
11247 case BUILT_IN_FINITEL:
11248 case BUILT_IN_FINITED32:
11249 case BUILT_IN_FINITED64:
11250 case BUILT_IN_FINITED128:
11251 case BUILT_IN_FPCLASSIFY:
11252 case BUILT_IN_ISFINITE:
11253 case BUILT_IN_ISINF_SIGN:
11254 case BUILT_IN_ISINF:
11255 case BUILT_IN_ISINFF:
11256 case BUILT_IN_ISINFL:
11257 case BUILT_IN_ISINFD32:
11258 case BUILT_IN_ISINFD64:
11259 case BUILT_IN_ISINFD128:
11260 case BUILT_IN_ISNAN:
11261 case BUILT_IN_ISNANF:
11262 case BUILT_IN_ISNANL:
11263 case BUILT_IN_ISNAND32:
11264 case BUILT_IN_ISNAND64:
11265 case BUILT_IN_ISNAND128:
11266 case BUILT_IN_ISNORMAL:
11267 case BUILT_IN_ISGREATER:
11268 case BUILT_IN_ISGREATEREQUAL:
11269 case BUILT_IN_ISLESS:
11270 case BUILT_IN_ISLESSEQUAL:
11271 case BUILT_IN_ISLESSGREATER:
11272 case BUILT_IN_ISUNORDERED:
11273 case BUILT_IN_VA_ARG_PACK:
11274 case BUILT_IN_VA_ARG_PACK_LEN:
11275 case BUILT_IN_VA_COPY:
11276 case BUILT_IN_TRAP:
11277 case BUILT_IN_SAVEREGS:
11278 case BUILT_IN_POPCOUNTL:
11279 case BUILT_IN_POPCOUNTLL:
11280 case BUILT_IN_POPCOUNTIMAX:
11281 case BUILT_IN_POPCOUNT:
11282 case BUILT_IN_PARITYL:
11283 case BUILT_IN_PARITYLL:
11284 case BUILT_IN_PARITYIMAX:
11285 case BUILT_IN_PARITY:
11286 case BUILT_IN_LABS:
11287 case BUILT_IN_LLABS:
11288 case BUILT_IN_PREFETCH:
11289 case BUILT_IN_ACC_ON_DEVICE:
11290 return true;
11291
11292 default:
11293 return is_simple_builtin (decl);
11294 }
11295
11296 return false;
11297 }