Split constant handling out of fold_builtin_fma
[gcc.git] / gcc / builtins.c
/* Expand builtin functions.
   Copyright (C) 1988-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Legacy warning!  Please add no further builtin simplifications here
   (apart from pure constant folding) - builtin simplifications should go
   to match.pd or gimple-fold.c instead.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "predict.h"
#include "tree.h"
#include "gimple.h"
#include "rtl.h"
#include "alias.h"
#include "fold-const.h"
#include "stringpool.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "tree-object-size.h"
#include "realmpfr.h"
#include "cfgrtl.h"
#include "internal-fn.h"
#include "flags.h"
#include "regs.h"
#include "except.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "emit-rtl.h"
#include "stmt.h"
#include "expr.h"
#include "insn-codes.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"
#include "tm_p.h"
#include "target.h"
#include "langhooks.h"
#include "tree-ssanames.h"
#include "tree-dfa.h"
#include "value-prof.h"
#include "diagnostic-core.h"
#include "builtins.h"
#include "asan.h"
#include "cilk.h"
#include "cgraph.h"
#include "tree-chkp.h"
#include "rtl-chkp.h"


static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));

struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN

/* Set up an array of builtin_info_type, making sure each element's decl
   is initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;

static rtx c_readstr (const char *, machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static void expand_errno_check (tree, rtx);
static rtx expand_builtin_mathfn (tree, rtx, rtx);
static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
static rtx expand_builtin_memcpy_args (tree, tree, tree, rtx, tree);
static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
					machine_mode, int, tree);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree, bool);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree fold_builtin_nan (tree, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_bitop (tree, tree);
static tree fold_builtin_strchr (location_t, tree, tree, tree);
static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_strcmp (location_t, tree, tree);
static tree fold_builtin_strncmp (location_t, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
					enum tree_code);
static tree fold_builtin_0 (location_t, tree);
static tree fold_builtin_1 (location_t, tree, tree);
static tree fold_builtin_2 (location_t, tree, tree, tree);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree*, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strstr (location_t, tree, tree, tree);
static tree fold_builtin_strrchr (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
			  const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_arg2 (tree, tree, tree,
			  int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_arg3 (tree, tree, tree, tree,
			  int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_sincos (tree, tree, tree);
static tree do_mpfr_bessel_n (tree, tree, tree,
			      int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
			      const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);

/* Return true if NAME starts with __builtin_, __sync_, or __atomic_,
   or names one of the Cilk runtime entry points when Cilk Plus is
   enabled.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  if (flag_cilkplus
      && (!strcmp (name, "__cilkrts_detach")
	  || !strcmp (name, "__cilkrts_pop_frame")))
    return true;
  return false;
}
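
/* Example (illustrative): is_builtin_name ("__builtin_memcpy") and
   is_builtin_name ("__sync_fetch_and_add") return true, while
   is_builtin_name ("memcpy") returns false - only the prefixed
   "internal" spellings are matched.  */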


/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

static bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}

/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and N
   in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
			     &mode, &unsignedp, &volatilep, true);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
    {
      align = TYPE_ALIGN (TREE_TYPE (exp));
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = ptr_bitmask & -ptr_bitmask;
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, (step & -step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      if (!addr_p && !known_alignment
	  && TYPE_ALIGN (TREE_TYPE (exp)) > align)
	align = TYPE_ALIGN (TREE_TYPE (exp));
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CSTs are the only constant objects we allow not to be
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  *alignp = align;
  *bitposp = bitpos & (*alignp - 1);
  return known_alignment;
}

/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be determined,
   store M in *ALIGNP and N in *BITPOSP and return true.  Otherwise return
   false and store BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}

/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);
  return align;
}
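
/* Worked example (illustrative): if get_object_alignment_1 computed
   align == 64 and bitpos == 16, the object is known to lie 16 bits past
   a 64-bit boundary, i.e. ptr & 63 == 16.  The strongest alignment that
   can then be guaranteed is bitpos & -bitpos == 16 bits (2 bytes),
   which is what is returned above.  */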

/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be determined,
   store M in *ALIGNP and N in *BITPOSP and return true.  Return false if
   the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* We cannot really tell whether this result is an approximation.  */
	  return true;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}

/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);

  return align;
}

/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  loc = EXPR_LOC_OR_LOC (src, input_location);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! tree_fits_shwi_p (offset_node))
    offset = -1;
  else
    offset = tree_to_shwi (offset_node);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (src))
	{
	  warning_at (loc, 0, "offset outside bounds of constant string");
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
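
/* Example (illustrative): for the string constant "foobar", c_strlen
   folds to ssize_int (6); for i++ ? "foo" : "bar" it folds to 3 only
   when ONLY_VALUE is nonzero, since with ONLY_VALUE == 0 the side
   effect of i++ would be lost, as described in the comment above.  */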

/* Return a char pointer for a C string if it is a string constant
   or sum of string constant and integer constant.  */

const char *
c_getstr (tree src)
{
  tree offset_node;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;

  if (offset_node == 0)
    return TREE_STRING_POINTER (src);
  else if (!tree_fits_uhwi_p (offset_node)
	   || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
    return 0;

  return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
}

/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, machine_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
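
/* Worked example (illustrative): on a little-endian target,
   c_readstr ("abcd", SImode) produces the constant 0x64636261 - byte 0
   of the string lands in the least significant byte, matching what a
   target load from the string would yield.  */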

/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* At this point we do not care whether the value fits.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
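
/* Example (illustrative): for the INTEGER_CST 65, *P becomes 'A' and 0
   is returned.  1 is returned only when CST is not an INTEGER_CST, or
   when target chars are wider than host chars and the value does not
   round-trip, e.g. 0x141 with 16-bit target chars and 8-bit host
   chars.  */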

/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}

/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
  if (tem == NULL_RTX)
    {
      /* For a zero count with __builtin_return_address, we don't care what
	 frame address we return, because target-specific definitions will
	 override us.  Therefore frame pointer elimination is OK, and using
	 the soft frame pointer is OK.

	 For a nonzero count, or a zero count with __builtin_frame_address,
	 we require a stable offset from the current frame pointer to the
	 previous one, so we must use the hard frame pointer, and
	 we must disable frame pointer elimination.  */
      if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
	tem = frame_pointer_rtx;
      else
	{
	  tem = hard_frame_pointer_rtx;

	  /* Tell reload not to eliminate the frame pointer.  */
	  crtl->accesses_prior_frames = 1;
	}
    }

  if (count > 0)
    SETUP_FRAME_ADDRESSES ();

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return FRAME_ADDR_RTX (tem);

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
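
/* At the source level (illustrative) this expands calls such as
   __builtin_return_address (0) and __builtin_frame_address (1); any
   nonzero COUNT pins the hard frame pointer and disables frame pointer
   elimination, as explained above.  */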

/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}
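
/* Resulting jmp_buf layout (summarizing the stores above): word 0 holds
   the value of targetm.builtin_setjmp_frame_value (), word 1 holds the
   address of RECEIVER_LABEL, and the area from offset
   2 * GET_MODE_SIZE (Pmode) onward holds the machine-dependent stack
   save area.  */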

/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
  if (! targetm.have_nonlocal_goto ())
    {
      /* First adjust our frame pointer to its actual value.  It was
	 previously set to the start of the virtual area corresponding to
	 the stacked variables when we branched here and now needs to be
	 adjusted to the actual hardware fp value.

	 Assignments to virtual registers are converted by
	 instantiate_virtual_regs into the corresponding assignment
	 to the underlying register (fp in this case) that makes
	 the original assignment true.
	 So the following insn will actually be decrementing fp by
	 STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
	 Mark it used (so that the previous assignment remains live once
	 the frame pointer is eliminated) and clobbered (to represent the
	 implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

  if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }

  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());
  else
    { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}

/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require the user to pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
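
/* Source-level sketch of the pairing implemented above (illustrative;
   these builtins are for internal exception handling use only, and the
   second argument to __builtin_longjmp must be the constant 1):

     void *buf[5];
     if (__builtin_setjmp (buf) == 0)
       __builtin_longjmp (buf, 1);  */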

/* Return true if the const call expression argument iterator ITER
   has more arguments to visit.  */

static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis; otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = false;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  do
    {
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis; any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  return res;
}
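
/* Example (illustrative): validate_arglist (exp, POINTER_TYPE,
   INTEGER_TYPE, VOID_TYPE) accepts exactly a (pointer, integer)
   argument list, while ending the specifier list with 0 instead of
   VOID_TYPE would allow arbitrary further arguments, as for a varargs
   builtin.  */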

/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}

/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}

/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

  if (targetm.have_prefetch ())
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
	return;
    }

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
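
/* For reference (illustrative): a source-level call such as
   __builtin_prefetch (p, 0, 3) requests a read prefetch with maximal
   temporal locality; both the read/write flag and the locality degree
   must be compile-time constants, as enforced above.  */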

/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not a resolved SAVE_EXPR, MEM_ATTRS can still be derived
     from its expression; for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}
\f
/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = VOIDmode;
	  }
    }
  return size;
}

/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}

/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (mem, reg)
			    : gen_rtx_SET (reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}

/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  machine_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
  /* We need the pointer as the caller actually passed the arguments to us,
     not as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  if (STACK_GROWS_DOWNWARD)
    tem
      = force_operand (plus_constant (Pmode, tem,
				      crtl->args.pretend_args_size),
		       NULL_RTX);
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}

/* __builtin_apply_args returns a block of memory allocated on
   the stack into which are stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */
1504
1505 static rtx
1506 expand_builtin_apply_args (void)
1507 {
1508 /* Don't do __builtin_apply_args more than once in a function.
1509 Save the result of the first call and reuse it. */
1510 if (apply_args_value != 0)
1511 return apply_args_value;
1512 {
1513 /* When this function is called, it means that registers must be
1514 saved on entry to this function. So we migrate the
1515 call to the first insn of this function. */
1516 rtx temp;
1517
1518 start_sequence ();
1519 temp = expand_builtin_apply_args_1 ();
1520 rtx_insn *seq = get_insns ();
1521 end_sequence ();
1522
1523 apply_args_value = temp;
1524
1525 /* Put the insns after the NOTE that starts the function.
1526 If this is inside a start_sequence, make the outer-level insn
1527 chain current, so the code is placed at the start of the
1528 function. If internal_arg_pointer is a non-virtual pseudo,
1529 it needs to be placed after the function that initializes
1530 that pseudo. */
1531 push_topmost_sequence ();
1532 if (REG_P (crtl->args.internal_arg_pointer)
1533 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1534 emit_insn_before (seq, parm_birth_insn);
1535 else
1536 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1537 pop_topmost_sequence ();
1538 return temp;
1539 }
1540 }
1541
1542 /* Perform an untyped call and save the state required to perform an
1543 untyped return of whatever value was returned by the given function. */
1544
1545 static rtx
1546 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1547 {
1548 int size, align, regno;
1549 machine_mode mode;
1550 rtx incoming_args, result, reg, dest, src;
1551 rtx_call_insn *call_insn;
1552 rtx old_stack_level = 0;
1553 rtx call_fusage = 0;
1554 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1555
1556 arguments = convert_memory_address (Pmode, arguments);
1557
1558 /* Create a block where the return registers can be saved. */
1559 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1560
1561 /* Fetch the arg pointer from the ARGUMENTS block. */
1562 incoming_args = gen_reg_rtx (Pmode);
1563 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1564 if (!STACK_GROWS_DOWNWARD)
1565 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1566 incoming_args, 0, OPTAB_LIB_WIDEN);
1567
1568 /* Push a new argument block and copy the arguments. Do not allow
1569 the (potential) memcpy call below to interfere with our stack
1570 manipulations. */
1571 do_pending_stack_adjust ();
1572 NO_DEFER_POP;
1573
1574 /* Save the stack with nonlocal if available. */
1575 if (targetm.have_save_stack_nonlocal ())
1576 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1577 else
1578 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1579
1580 /* Allocate a block of memory onto the stack and copy the memory
1581 arguments to the outgoing arguments address. We can pass TRUE
1582 as the 4th argument because we just saved the stack pointer
1583 and will restore it right after the call. */
1584 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1585
1586 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1587 may have already set current_function_calls_alloca to true.
1588 current_function_calls_alloca won't be set if argsize is zero,
1589 so we have to guarantee need_drap is true here. */
1590 if (SUPPORTS_STACK_ALIGNMENT)
1591 crtl->need_drap = true;
1592
1593 dest = virtual_outgoing_args_rtx;
1594 if (!STACK_GROWS_DOWNWARD)
1595 {
1596 if (CONST_INT_P (argsize))
1597 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1598 else
1599 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1600 }
1601 dest = gen_rtx_MEM (BLKmode, dest);
1602 set_mem_align (dest, PARM_BOUNDARY);
1603 src = gen_rtx_MEM (BLKmode, incoming_args);
1604 set_mem_align (src, PARM_BOUNDARY);
1605 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1606
1607 /* Refer to the argument block. */
1608 apply_args_size ();
1609 arguments = gen_rtx_MEM (BLKmode, arguments);
1610 set_mem_align (arguments, PARM_BOUNDARY);
1611
1612 /* Walk past the arg-pointer and structure value address. */
1613 size = GET_MODE_SIZE (Pmode);
1614 if (struct_value)
1615 size += GET_MODE_SIZE (Pmode);
1616
1617 /* Restore each of the registers previously saved. Make USE insns
1618 for each of these registers for use in making the call. */
1619 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1620 if ((mode = apply_args_mode[regno]) != VOIDmode)
1621 {
1622 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1623 if (size % align != 0)
1624 size = CEIL (size, align) * align;
1625 reg = gen_rtx_REG (mode, regno);
1626 emit_move_insn (reg, adjust_address (arguments, mode, size));
1627 use_reg (&call_fusage, reg);
1628 size += GET_MODE_SIZE (mode);
1629 }
1630
1631 /* Restore the structure value address unless this is passed as an
1632 "invisible" first argument. */
1633 size = GET_MODE_SIZE (Pmode);
1634 if (struct_value)
1635 {
1636 rtx value = gen_reg_rtx (Pmode);
1637 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1638 emit_move_insn (struct_value, value);
1639 if (REG_P (struct_value))
1640 use_reg (&call_fusage, struct_value);
1641 size += GET_MODE_SIZE (Pmode);
1642 }
1643
1644 /* All arguments and registers used for the call are set up by now! */
1645 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1646
1647 /* Ensure the address is valid. A SYMBOL_REF is already valid, so no
1648 work is needed; and we don't want to load it into a register as an
1649 optimization, because prepare_call_address already did so if needed. */
1650 if (GET_CODE (function) != SYMBOL_REF)
1651 function = memory_address (FUNCTION_MODE, function);
1652
1653 /* Generate the actual call instruction and save the return value. */
1654 if (targetm.have_untyped_call ())
1655 {
1656 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1657 emit_call_insn (targetm.gen_untyped_call (mem, result,
1658 result_vector (1, result)));
1659 }
1660 else if (targetm.have_call_value ())
1661 {
1662 rtx valreg = 0;
1663
1664 /* Locate the unique return register. It is not possible to
1665 express a call that sets more than one return register using
1666 call_value; use untyped_call for that. In fact, untyped_call
1667 only needs to save the return registers in the given block. */
1668 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1669 if ((mode = apply_result_mode[regno]) != VOIDmode)
1670 {
1671 gcc_assert (!valreg); /* have_untyped_call required. */
1672
1673 valreg = gen_rtx_REG (mode, regno);
1674 }
1675
1676 emit_insn (targetm.gen_call_value (valreg,
1677 gen_rtx_MEM (FUNCTION_MODE, function),
1678 const0_rtx, NULL_RTX, const0_rtx));
1679
1680 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1681 }
1682 else
1683 gcc_unreachable ();
1684
1685 /* Find the CALL insn we just emitted, and attach the register usage
1686 information. */
1687 call_insn = last_call_insn ();
1688 add_function_usage_to (call_insn, call_fusage);
1689
1690 /* Restore the stack. */
1691 if (targetm.have_save_stack_nonlocal ())
1692 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1693 else
1694 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1695 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1696
1697 OK_DEFER_POP;
1698
1699 /* Return the address of the result block. */
1700 result = copy_addr_to_reg (XEXP (result, 0));
1701 return convert_memory_address (ptr_mode, result);
1702 }
1703
1704 /* Perform an untyped return. */
1705
1706 static void
1707 expand_builtin_return (rtx result)
1708 {
1709 int size, align, regno;
1710 machine_mode mode;
1711 rtx reg;
1712 rtx_insn *call_fusage = 0;
1713
1714 result = convert_memory_address (Pmode, result);
1715
1716 apply_result_size ();
1717 result = gen_rtx_MEM (BLKmode, result);
1718
1719 if (targetm.have_untyped_return ())
1720 {
1721 rtx vector = result_vector (0, result);
1722 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1723 emit_barrier ();
1724 return;
1725 }
1726
1727 /* Restore the return value and note that each value is used. */
1728 size = 0;
1729 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1730 if ((mode = apply_result_mode[regno]) != VOIDmode)
1731 {
1732 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1733 if (size % align != 0)
1734 size = CEIL (size, align) * align;
1735 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1736 emit_move_insn (reg, adjust_address (result, mode, size));
1737
1738 push_to_sequence (call_fusage);
1739 emit_use (reg);
1740 call_fusage = get_insns ();
1741 end_sequence ();
1742 size += GET_MODE_SIZE (mode);
1743 }
1744
1745 /* Put the USE insns before the return. */
1746 emit_insn (call_fusage);
1747
1748 /* Return whatever values were restored by jumping directly to the end
1749 of the function. */
1750 expand_naked_return ();
1751 }
1752
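/* A minimal user-level sketch of the untyped call machinery that
   expand_builtin_apply and expand_builtin_return implement. The code
   below is hypothetical and only illustrative; 64 is an assumed upper
   bound on the argument block size, passed here as ARGSIZE:

       double target_fn (int, double);

       double forward (int i, double d)
       {
         void *args = __builtin_apply_args ();
         void *ret = __builtin_apply ((void (*) ()) target_fn, args, 64);
         __builtin_return (ret);
       }
*/
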
1753 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1754
1755 static enum type_class
1756 type_to_class (tree type)
1757 {
1758 switch (TREE_CODE (type))
1759 {
1760 case VOID_TYPE: return void_type_class;
1761 case INTEGER_TYPE: return integer_type_class;
1762 case ENUMERAL_TYPE: return enumeral_type_class;
1763 case BOOLEAN_TYPE: return boolean_type_class;
1764 case POINTER_TYPE: return pointer_type_class;
1765 case REFERENCE_TYPE: return reference_type_class;
1766 case OFFSET_TYPE: return offset_type_class;
1767 case REAL_TYPE: return real_type_class;
1768 case COMPLEX_TYPE: return complex_type_class;
1769 case FUNCTION_TYPE: return function_type_class;
1770 case METHOD_TYPE: return method_type_class;
1771 case RECORD_TYPE: return record_type_class;
1772 case UNION_TYPE:
1773 case QUAL_UNION_TYPE: return union_type_class;
1774 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1775 ? string_type_class : array_type_class);
1776 case LANG_TYPE: return lang_type_class;
1777 default: return no_type_class;
1778 }
1779 }
1780
1781 /* Expand a call EXP to __builtin_classify_type. */
1782
1783 static rtx
1784 expand_builtin_classify_type (tree exp)
1785 {
1786 if (call_expr_nargs (exp))
1787 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1788 return GEN_INT (no_type_class);
1789 }
1790
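/* For illustration (hypothetical calls): __builtin_classify_type (3.14)
   reaches this expander with a REAL_TYPE argument and expands to the
   constant real_type_class, while __builtin_classify_type ("s") yields
   pointer_type_class after the usual array-to-pointer decay of the
   argument. */
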
1791 /* This helper macro, meant to be used in mathfn_built_in below,
1792 determines which among a set of three builtin math functions is
1793 appropriate for a given type mode. The `F' and `L' cases are
1794 automatically generated from the `double' case. */
1795 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1796 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1797 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1798 fcodel = BUILT_IN_MATHFN##L ; break;
1799 /* Similar to above, but appends _R after any F/L suffix. */
1800 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1801 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1802 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1803 fcodel = BUILT_IN_MATHFN##L_R ; break;
1804
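/* As an example of the first macro, CASE_MATHFN (BUILT_IN_SQRT)
   expands to:

       case BUILT_IN_SQRT: case BUILT_IN_SQRTF: case BUILT_IN_SQRTL:
         fcode = BUILT_IN_SQRT; fcodef = BUILT_IN_SQRTF;
         fcodel = BUILT_IN_SQRTL; break;
*/
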
1805 /* Return the mathematical function equivalent to FN but operating
1806 directly on TYPE, if available. If IMPLICIT_P is true use the implicit
1807 builtin declaration, otherwise use the explicit declaration. If we
1808 can't do the conversion, return zero. */
1809
1810 static tree
1811 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
1812 {
1813 enum built_in_function fcode, fcodef, fcodel, fcode2;
1814
1815 switch (fn)
1816 {
1817 CASE_MATHFN (BUILT_IN_ACOS)
1818 CASE_MATHFN (BUILT_IN_ACOSH)
1819 CASE_MATHFN (BUILT_IN_ASIN)
1820 CASE_MATHFN (BUILT_IN_ASINH)
1821 CASE_MATHFN (BUILT_IN_ATAN)
1822 CASE_MATHFN (BUILT_IN_ATAN2)
1823 CASE_MATHFN (BUILT_IN_ATANH)
1824 CASE_MATHFN (BUILT_IN_CBRT)
1825 CASE_MATHFN (BUILT_IN_CEIL)
1826 CASE_MATHFN (BUILT_IN_CEXPI)
1827 CASE_MATHFN (BUILT_IN_COPYSIGN)
1828 CASE_MATHFN (BUILT_IN_COS)
1829 CASE_MATHFN (BUILT_IN_COSH)
1830 CASE_MATHFN (BUILT_IN_DREM)
1831 CASE_MATHFN (BUILT_IN_ERF)
1832 CASE_MATHFN (BUILT_IN_ERFC)
1833 CASE_MATHFN (BUILT_IN_EXP)
1834 CASE_MATHFN (BUILT_IN_EXP10)
1835 CASE_MATHFN (BUILT_IN_EXP2)
1836 CASE_MATHFN (BUILT_IN_EXPM1)
1837 CASE_MATHFN (BUILT_IN_FABS)
1838 CASE_MATHFN (BUILT_IN_FDIM)
1839 CASE_MATHFN (BUILT_IN_FLOOR)
1840 CASE_MATHFN (BUILT_IN_FMA)
1841 CASE_MATHFN (BUILT_IN_FMAX)
1842 CASE_MATHFN (BUILT_IN_FMIN)
1843 CASE_MATHFN (BUILT_IN_FMOD)
1844 CASE_MATHFN (BUILT_IN_FREXP)
1845 CASE_MATHFN (BUILT_IN_GAMMA)
1846 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1847 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1848 CASE_MATHFN (BUILT_IN_HYPOT)
1849 CASE_MATHFN (BUILT_IN_ILOGB)
1850 CASE_MATHFN (BUILT_IN_ICEIL)
1851 CASE_MATHFN (BUILT_IN_IFLOOR)
1852 CASE_MATHFN (BUILT_IN_INF)
1853 CASE_MATHFN (BUILT_IN_IRINT)
1854 CASE_MATHFN (BUILT_IN_IROUND)
1855 CASE_MATHFN (BUILT_IN_ISINF)
1856 CASE_MATHFN (BUILT_IN_J0)
1857 CASE_MATHFN (BUILT_IN_J1)
1858 CASE_MATHFN (BUILT_IN_JN)
1859 CASE_MATHFN (BUILT_IN_LCEIL)
1860 CASE_MATHFN (BUILT_IN_LDEXP)
1861 CASE_MATHFN (BUILT_IN_LFLOOR)
1862 CASE_MATHFN (BUILT_IN_LGAMMA)
1863 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1864 CASE_MATHFN (BUILT_IN_LLCEIL)
1865 CASE_MATHFN (BUILT_IN_LLFLOOR)
1866 CASE_MATHFN (BUILT_IN_LLRINT)
1867 CASE_MATHFN (BUILT_IN_LLROUND)
1868 CASE_MATHFN (BUILT_IN_LOG)
1869 CASE_MATHFN (BUILT_IN_LOG10)
1870 CASE_MATHFN (BUILT_IN_LOG1P)
1871 CASE_MATHFN (BUILT_IN_LOG2)
1872 CASE_MATHFN (BUILT_IN_LOGB)
1873 CASE_MATHFN (BUILT_IN_LRINT)
1874 CASE_MATHFN (BUILT_IN_LROUND)
1875 CASE_MATHFN (BUILT_IN_MODF)
1876 CASE_MATHFN (BUILT_IN_NAN)
1877 CASE_MATHFN (BUILT_IN_NANS)
1878 CASE_MATHFN (BUILT_IN_NEARBYINT)
1879 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1880 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1881 CASE_MATHFN (BUILT_IN_POW)
1882 CASE_MATHFN (BUILT_IN_POWI)
1883 CASE_MATHFN (BUILT_IN_POW10)
1884 CASE_MATHFN (BUILT_IN_REMAINDER)
1885 CASE_MATHFN (BUILT_IN_REMQUO)
1886 CASE_MATHFN (BUILT_IN_RINT)
1887 CASE_MATHFN (BUILT_IN_ROUND)
1888 CASE_MATHFN (BUILT_IN_SCALB)
1889 CASE_MATHFN (BUILT_IN_SCALBLN)
1890 CASE_MATHFN (BUILT_IN_SCALBN)
1891 CASE_MATHFN (BUILT_IN_SIGNBIT)
1892 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1893 CASE_MATHFN (BUILT_IN_SIN)
1894 CASE_MATHFN (BUILT_IN_SINCOS)
1895 CASE_MATHFN (BUILT_IN_SINH)
1896 CASE_MATHFN (BUILT_IN_SQRT)
1897 CASE_MATHFN (BUILT_IN_TAN)
1898 CASE_MATHFN (BUILT_IN_TANH)
1899 CASE_MATHFN (BUILT_IN_TGAMMA)
1900 CASE_MATHFN (BUILT_IN_TRUNC)
1901 CASE_MATHFN (BUILT_IN_Y0)
1902 CASE_MATHFN (BUILT_IN_Y1)
1903 CASE_MATHFN (BUILT_IN_YN)
1904
1905 default:
1906 return NULL_TREE;
1907 }
1908
1909 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1910 fcode2 = fcode;
1911 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1912 fcode2 = fcodef;
1913 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1914 fcode2 = fcodel;
1915 else
1916 return NULL_TREE;
1917
1918 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1919 return NULL_TREE;
1920
1921 return builtin_decl_explicit (fcode2);
1922 }
1923
1924 /* Like mathfn_built_in_1 (), but always use the implicit declarations. */
1925
1926 tree
1927 mathfn_built_in (tree type, enum built_in_function fn)
1928 {
1929 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1930 }
1931
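/* For example, mathfn_built_in (float_type_node, BUILT_IN_SIN) selects
   fcodef and returns the declaration of BUILT_IN_SINF, or NULL_TREE
   when sinf may not be used implicitly for the current language and
   target. */
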
1932 /* If errno must be maintained, expand the RTL to check if the result,
1933 TARGET, of a built-in function call, EXP, is NaN, and if so set
1934 errno to EDOM. */
1935
1936 static void
1937 expand_errno_check (tree exp, rtx target)
1938 {
1939 rtx_code_label *lab = gen_label_rtx ();
1940
1941 /* Test the result; if it is NaN, set errno=EDOM because
1942 the argument was not in the domain. */
1943 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1944 NULL_RTX, NULL, lab,
1945 /* The jump is very likely. */
1946 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
1947
1948 #ifdef TARGET_EDOM
1949 /* If this built-in doesn't throw an exception, set errno directly. */
1950 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1951 {
1952 #ifdef GEN_ERRNO_RTX
1953 rtx errno_rtx = GEN_ERRNO_RTX;
1954 #else
1955 rtx errno_rtx
1956 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1957 #endif
1958 emit_move_insn (errno_rtx,
1959 gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
1960 emit_label (lab);
1961 return;
1962 }
1963 #endif
1964
1965 /* Make sure the library call isn't expanded as a tail call. */
1966 CALL_EXPR_TAILCALL (exp) = 0;
1967
1968 /* We can't set errno=EDOM directly; let the library call do it.
1969 Pop the arguments right away in case the call gets deleted. */
1970 NO_DEFER_POP;
1971 expand_call (exp, target, 0);
1972 OK_DEFER_POP;
1973 emit_label (lab);
1974 }
1975
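/* The self-comparison above exploits the IEEE rule that a NaN compares
   unequal even to itself: when TARGET == TARGET the result is not NaN
   and the very-likely jump skips the errno store; otherwise we fall
   through and set errno to EDOM. */
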
1976 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1977 Return NULL_RTX if a normal call should be emitted rather than expanding
1978 the function in-line. EXP is the expression that is a call to the builtin
1979 function; if convenient, the result should be placed in TARGET.
1980 SUBTARGET may be used as the target for computing one of EXP's operands. */
1981
1982 static rtx
1983 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1984 {
1985 optab builtin_optab;
1986 rtx op0;
1987 rtx_insn *insns;
1988 tree fndecl = get_callee_fndecl (exp);
1989 machine_mode mode;
1990 bool errno_set = false;
1991 bool try_widening = false;
1992 tree arg;
1993
1994 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1995 return NULL_RTX;
1996
1997 arg = CALL_EXPR_ARG (exp, 0);
1998
1999 switch (DECL_FUNCTION_CODE (fndecl))
2000 {
2001 CASE_FLT_FN (BUILT_IN_SQRT):
2002 errno_set = ! tree_expr_nonnegative_p (arg);
2003 try_widening = true;
2004 builtin_optab = sqrt_optab;
2005 break;
2006 CASE_FLT_FN (BUILT_IN_EXP):
2007 errno_set = true; builtin_optab = exp_optab; break;
2008 CASE_FLT_FN (BUILT_IN_EXP10):
2009 CASE_FLT_FN (BUILT_IN_POW10):
2010 errno_set = true; builtin_optab = exp10_optab; break;
2011 CASE_FLT_FN (BUILT_IN_EXP2):
2012 errno_set = true; builtin_optab = exp2_optab; break;
2013 CASE_FLT_FN (BUILT_IN_EXPM1):
2014 errno_set = true; builtin_optab = expm1_optab; break;
2015 CASE_FLT_FN (BUILT_IN_LOGB):
2016 errno_set = true; builtin_optab = logb_optab; break;
2017 CASE_FLT_FN (BUILT_IN_LOG):
2018 errno_set = true; builtin_optab = log_optab; break;
2019 CASE_FLT_FN (BUILT_IN_LOG10):
2020 errno_set = true; builtin_optab = log10_optab; break;
2021 CASE_FLT_FN (BUILT_IN_LOG2):
2022 errno_set = true; builtin_optab = log2_optab; break;
2023 CASE_FLT_FN (BUILT_IN_LOG1P):
2024 errno_set = true; builtin_optab = log1p_optab; break;
2025 CASE_FLT_FN (BUILT_IN_ASIN):
2026 builtin_optab = asin_optab; break;
2027 CASE_FLT_FN (BUILT_IN_ACOS):
2028 builtin_optab = acos_optab; break;
2029 CASE_FLT_FN (BUILT_IN_TAN):
2030 builtin_optab = tan_optab; break;
2031 CASE_FLT_FN (BUILT_IN_ATAN):
2032 builtin_optab = atan_optab; break;
2033 CASE_FLT_FN (BUILT_IN_FLOOR):
2034 builtin_optab = floor_optab; break;
2035 CASE_FLT_FN (BUILT_IN_CEIL):
2036 builtin_optab = ceil_optab; break;
2037 CASE_FLT_FN (BUILT_IN_TRUNC):
2038 builtin_optab = btrunc_optab; break;
2039 CASE_FLT_FN (BUILT_IN_ROUND):
2040 builtin_optab = round_optab; break;
2041 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2042 builtin_optab = nearbyint_optab;
2043 if (flag_trapping_math)
2044 break;
2045 /* Else fall through and expand as rint. */
2046 CASE_FLT_FN (BUILT_IN_RINT):
2047 builtin_optab = rint_optab; break;
2048 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2049 builtin_optab = significand_optab; break;
2050 default:
2051 gcc_unreachable ();
2052 }
2053
2054 /* Make a suitable register to place result in. */
2055 mode = TYPE_MODE (TREE_TYPE (exp));
2056
2057 if (! flag_errno_math || ! HONOR_NANS (mode))
2058 errno_set = false;
2059
2060 /* Before working hard, check whether the instruction is available, but try
2061 to widen the mode for specific operations. */
2062 if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2063 || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
2064 && (!errno_set || !optimize_insn_for_size_p ()))
2065 {
2066 rtx result = gen_reg_rtx (mode);
2067
2068 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2069 need to expand the argument again. This way, we will not perform
2070 side effects more than once. */
2071 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2072
2073 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2074
2075 start_sequence ();
2076
2077 /* Compute into RESULT.
2078 Set RESULT to wherever the result comes back. */
2079 result = expand_unop (mode, builtin_optab, op0, result, 0);
2080
2081 if (result != 0)
2082 {
2083 if (errno_set)
2084 expand_errno_check (exp, result);
2085
2086 /* Output the entire sequence. */
2087 insns = get_insns ();
2088 end_sequence ();
2089 emit_insn (insns);
2090 return result;
2091 }
2092
2093 /* If we were unable to expand via the builtin, stop the sequence
2094 (without outputting the insns) and call the library function
2095 with the stabilized argument list. */
2096 end_sequence ();
2097 }
2098
2099 return expand_call (exp, target, target == const0_rtx);
2100 }
2101
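/* Sketch of the effect (illustrative): on a target whose sqrt_optab has
   an SFmode handler, a call to sqrtf expands to that single insn, and
   with -fmath-errno the expansion is followed by the NaN check so that
   sqrtf (-1.0f) still sets errno to EDOM; without a handler we simply
   fall back to the library call. */
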
2102 /* Expand a call to the builtin binary math functions (pow and atan2).
2103 Return NULL_RTX if a normal call should be emitted rather than expanding the
2104 function in-line. EXP is the expression that is a call to the builtin
2105 function; if convenient, the result should be placed in TARGET.
2106 SUBTARGET may be used as the target for computing one of EXP's
2107 operands. */
2108
2109 static rtx
2110 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2111 {
2112 optab builtin_optab;
2113 rtx op0, op1, result;
2114 rtx_insn *insns;
2115 int op1_type = REAL_TYPE;
2116 tree fndecl = get_callee_fndecl (exp);
2117 tree arg0, arg1;
2118 machine_mode mode;
2119 bool errno_set = true;
2120
2121 switch (DECL_FUNCTION_CODE (fndecl))
2122 {
2123 CASE_FLT_FN (BUILT_IN_SCALBN):
2124 CASE_FLT_FN (BUILT_IN_SCALBLN):
2125 CASE_FLT_FN (BUILT_IN_LDEXP):
2126 op1_type = INTEGER_TYPE;
2127 default:
2128 break;
2129 }
2130
2131 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2132 return NULL_RTX;
2133
2134 arg0 = CALL_EXPR_ARG (exp, 0);
2135 arg1 = CALL_EXPR_ARG (exp, 1);
2136
2137 switch (DECL_FUNCTION_CODE (fndecl))
2138 {
2139 CASE_FLT_FN (BUILT_IN_POW):
2140 builtin_optab = pow_optab; break;
2141 CASE_FLT_FN (BUILT_IN_ATAN2):
2142 builtin_optab = atan2_optab; break;
2143 CASE_FLT_FN (BUILT_IN_SCALB):
2144 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2145 return 0;
2146 builtin_optab = scalb_optab; break;
2147 CASE_FLT_FN (BUILT_IN_SCALBN):
2148 CASE_FLT_FN (BUILT_IN_SCALBLN):
2149 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2150 return 0;
2151 /* Fall through... */
2152 CASE_FLT_FN (BUILT_IN_LDEXP):
2153 builtin_optab = ldexp_optab; break;
2154 CASE_FLT_FN (BUILT_IN_FMOD):
2155 builtin_optab = fmod_optab; break;
2156 CASE_FLT_FN (BUILT_IN_REMAINDER):
2157 CASE_FLT_FN (BUILT_IN_DREM):
2158 builtin_optab = remainder_optab; break;
2159 default:
2160 gcc_unreachable ();
2161 }
2162
2163 /* Make a suitable register to place result in. */
2164 mode = TYPE_MODE (TREE_TYPE (exp));
2165
2166 /* Before working hard, check whether the instruction is available. */
2167 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2168 return NULL_RTX;
2169
2170 result = gen_reg_rtx (mode);
2171
2172 if (! flag_errno_math || ! HONOR_NANS (mode))
2173 errno_set = false;
2174
2175 if (errno_set && optimize_insn_for_size_p ())
2176 return 0;
2177
2178 /* Always stabilize the argument list. */
2179 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2180 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2181
2182 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2183 op1 = expand_normal (arg1);
2184
2185 start_sequence ();
2186
2187 /* Compute into RESULT.
2188 Set RESULT to wherever the result comes back. */
2189 result = expand_binop (mode, builtin_optab, op0, op1,
2190 result, 0, OPTAB_DIRECT);
2191
2192 /* If we were unable to expand via the builtin, stop the sequence
2193 (without outputting the insns) and call the library function
2194 with the stabilized argument list. */
2195 if (result == 0)
2196 {
2197 end_sequence ();
2198 return expand_call (exp, target, target == const0_rtx);
2199 }
2200
2201 if (errno_set)
2202 expand_errno_check (exp, result);
2203
2204 /* Output the entire sequence. */
2205 insns = get_insns ();
2206 end_sequence ();
2207 emit_insn (insns);
2208
2209 return result;
2210 }
2211
2212 /* Expand a call to the builtin ternary math functions (fma).
2213 Return NULL_RTX if a normal call should be emitted rather than expanding the
2214 function in-line. EXP is the expression that is a call to the builtin
2215 function; if convenient, the result should be placed in TARGET.
2216 SUBTARGET may be used as the target for computing one of EXP's
2217 operands. */
2218
2219 static rtx
2220 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2221 {
2222 optab builtin_optab;
2223 rtx op0, op1, op2, result;
2224 rtx_insn *insns;
2225 tree fndecl = get_callee_fndecl (exp);
2226 tree arg0, arg1, arg2;
2227 machine_mode mode;
2228
2229 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2230 return NULL_RTX;
2231
2232 arg0 = CALL_EXPR_ARG (exp, 0);
2233 arg1 = CALL_EXPR_ARG (exp, 1);
2234 arg2 = CALL_EXPR_ARG (exp, 2);
2235
2236 switch (DECL_FUNCTION_CODE (fndecl))
2237 {
2238 CASE_FLT_FN (BUILT_IN_FMA):
2239 builtin_optab = fma_optab; break;
2240 default:
2241 gcc_unreachable ();
2242 }
2243
2244 /* Make a suitable register to place result in. */
2245 mode = TYPE_MODE (TREE_TYPE (exp));
2246
2247 /* Before working hard, check whether the instruction is available. */
2248 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2249 return NULL_RTX;
2250
2251 result = gen_reg_rtx (mode);
2252
2253 /* Always stabilize the argument list. */
2254 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2255 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2256 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2257
2258 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2259 op1 = expand_normal (arg1);
2260 op2 = expand_normal (arg2);
2261
2262 start_sequence ();
2263
2264 /* Compute into RESULT.
2265 Set RESULT to wherever the result comes back. */
2266 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2267 result, 0);
2268
2269 /* If we were unable to expand via the builtin, stop the sequence
2270 (without outputting the insns) and call the library function
2271 with the stabilized argument list. */
2272 if (result == 0)
2273 {
2274 end_sequence ();
2275 return expand_call (exp, target, target == const0_rtx);
2276 }
2277
2278 /* Output the entire sequence. */
2279 insns = get_insns ();
2280 end_sequence ();
2281 emit_insn (insns);
2282
2283 return result;
2284 }
2285
2286 /* Expand a call to the builtin sin and cos math functions.
2287 Return NULL_RTX if a normal call should be emitted rather than expanding the
2288 function in-line. EXP is the expression that is a call to the builtin
2289 function; if convenient, the result should be placed in TARGET.
2290 SUBTARGET may be used as the target for computing one of EXP's
2291 operands. */
2292
2293 static rtx
2294 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2295 {
2296 optab builtin_optab;
2297 rtx op0;
2298 rtx_insn *insns;
2299 tree fndecl = get_callee_fndecl (exp);
2300 machine_mode mode;
2301 tree arg;
2302
2303 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2304 return NULL_RTX;
2305
2306 arg = CALL_EXPR_ARG (exp, 0);
2307
2308 switch (DECL_FUNCTION_CODE (fndecl))
2309 {
2310 CASE_FLT_FN (BUILT_IN_SIN):
2311 CASE_FLT_FN (BUILT_IN_COS):
2312 builtin_optab = sincos_optab; break;
2313 default:
2314 gcc_unreachable ();
2315 }
2316
2317 /* Make a suitable register to place result in. */
2318 mode = TYPE_MODE (TREE_TYPE (exp));
2319
2320 /* Check if the sincos insn is available, otherwise fall back
2321 to the sin or cos insn. */
2322 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2323 switch (DECL_FUNCTION_CODE (fndecl))
2324 {
2325 CASE_FLT_FN (BUILT_IN_SIN):
2326 builtin_optab = sin_optab; break;
2327 CASE_FLT_FN (BUILT_IN_COS):
2328 builtin_optab = cos_optab; break;
2329 default:
2330 gcc_unreachable ();
2331 }
2332
2333 /* Before working hard, check whether the instruction is available. */
2334 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2335 {
2336 rtx result = gen_reg_rtx (mode);
2337
2338 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2339 need to expand the argument again. This way, we will not perform
2340 side effects more than once. */
2341 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2342
2343 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2344
2345 start_sequence ();
2346
2347 /* Compute into RESULT.
2348 Set RESULT to wherever the result comes back. */
2349 if (builtin_optab == sincos_optab)
2350 {
2351 int ok;
2352
2353 switch (DECL_FUNCTION_CODE (fndecl))
2354 {
2355 CASE_FLT_FN (BUILT_IN_SIN):
2356 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2357 break;
2358 CASE_FLT_FN (BUILT_IN_COS):
2359 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2360 break;
2361 default:
2362 gcc_unreachable ();
2363 }
2364 gcc_assert (ok);
2365 }
2366 else
2367 result = expand_unop (mode, builtin_optab, op0, result, 0);
2368
2369 if (result != 0)
2370 {
2371 /* Output the entire sequence. */
2372 insns = get_insns ();
2373 end_sequence ();
2374 emit_insn (insns);
2375 return result;
2376 }
2377
2378 /* If we were unable to expand via the builtin, stop the sequence
2379 (without outputting the insns) and call the library function
2380 with the stabilized argument list. */
2381 end_sequence ();
2382 }
2383
2384 return expand_call (exp, target, target == const0_rtx);
2385 }
2386
2387 /* Given an interclass math builtin decl FNDECL and its argument ARG,
2388 return an RTL instruction code that implements the functionality.
2389 If that isn't possible or available, return CODE_FOR_nothing. */
2390
2391 static enum insn_code
2392 interclass_mathfn_icode (tree arg, tree fndecl)
2393 {
2394 bool errno_set = false;
2395 optab builtin_optab = unknown_optab;
2396 machine_mode mode;
2397
2398 switch (DECL_FUNCTION_CODE (fndecl))
2399 {
2400 CASE_FLT_FN (BUILT_IN_ILOGB):
2401 errno_set = true; builtin_optab = ilogb_optab; break;
2402 CASE_FLT_FN (BUILT_IN_ISINF):
2403 builtin_optab = isinf_optab; break;
2404 case BUILT_IN_ISNORMAL:
2405 case BUILT_IN_ISFINITE:
2406 CASE_FLT_FN (BUILT_IN_FINITE):
2407 case BUILT_IN_FINITED32:
2408 case BUILT_IN_FINITED64:
2409 case BUILT_IN_FINITED128:
2410 case BUILT_IN_ISINFD32:
2411 case BUILT_IN_ISINFD64:
2412 case BUILT_IN_ISINFD128:
2413 /* These builtins have no optabs (yet). */
2414 break;
2415 default:
2416 gcc_unreachable ();
2417 }
2418
2419 /* There's no easy way to detect the case where we need to set EDOM. */
2420 if (flag_errno_math && errno_set)
2421 return CODE_FOR_nothing;
2422
2423 /* Optab mode depends on the mode of the input argument. */
2424 mode = TYPE_MODE (TREE_TYPE (arg));
2425
2426 if (builtin_optab)
2427 return optab_handler (builtin_optab, mode);
2428 return CODE_FOR_nothing;
2429 }
2430
2431 /* Expand a call to one of the builtin math functions that operate on
2432 floating point argument and output an integer result (ilogb, isinf,
2433 isnan, etc).
2434 Return 0 if a normal call should be emitted rather than expanding the
2435 function in-line. EXP is the expression that is a call to the builtin
2436 function; if convenient, the result should be placed in TARGET. */
2437
2438 static rtx
2439 expand_builtin_interclass_mathfn (tree exp, rtx target)
2440 {
2441 enum insn_code icode = CODE_FOR_nothing;
2442 rtx op0;
2443 tree fndecl = get_callee_fndecl (exp);
2444 machine_mode mode;
2445 tree arg;
2446
2447 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2448 return NULL_RTX;
2449
2450 arg = CALL_EXPR_ARG (exp, 0);
2451 icode = interclass_mathfn_icode (arg, fndecl);
2452 mode = TYPE_MODE (TREE_TYPE (arg));
2453
2454 if (icode != CODE_FOR_nothing)
2455 {
2456 struct expand_operand ops[1];
2457 rtx_insn *last = get_last_insn ();
2458 tree orig_arg = arg;
2459
2460 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2461 need to expand the argument again. This way, we will not perform
2462 side effects more than once. */
2463 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2464
2465 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2466
2467 if (mode != GET_MODE (op0))
2468 op0 = convert_to_mode (mode, op0, 0);
2469
2470 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2471 if (maybe_legitimize_operands (icode, 0, 1, ops)
2472 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2473 return ops[0].value;
2474
2475 delete_insns_since (last);
2476 CALL_EXPR_ARG (exp, 0) = orig_arg;
2477 }
2478
2479 return NULL_RTX;
2480 }
2481
2482 /* Expand a call to the builtin sincos math function.
2483 Return NULL_RTX if a normal call should be emitted rather than expanding the
2484 function in-line. EXP is the expression that is a call to the builtin
2485 function. */
2486
2487 static rtx
2488 expand_builtin_sincos (tree exp)
2489 {
2490 rtx op0, op1, op2, target1, target2;
2491 machine_mode mode;
2492 tree arg, sinp, cosp;
2493 int result;
2494 location_t loc = EXPR_LOCATION (exp);
2495 tree alias_type, alias_off;
2496
2497 if (!validate_arglist (exp, REAL_TYPE,
2498 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2499 return NULL_RTX;
2500
2501 arg = CALL_EXPR_ARG (exp, 0);
2502 sinp = CALL_EXPR_ARG (exp, 1);
2503 cosp = CALL_EXPR_ARG (exp, 2);
2504
2505 /* Make a suitable register to place result in. */
2506 mode = TYPE_MODE (TREE_TYPE (arg));
2507
2508 /* Check if sincos insn is available, otherwise emit the call. */
2509 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2510 return NULL_RTX;
2511
2512 target1 = gen_reg_rtx (mode);
2513 target2 = gen_reg_rtx (mode);
2514
2515 op0 = expand_normal (arg);
2516 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2517 alias_off = build_int_cst (alias_type, 0);
2518 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2519 sinp, alias_off));
2520 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2521 cosp, alias_off));
2522
2523 /* Compute into target1 and target2.
2524 Set TARGET to wherever the result comes back. */
2525 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2526 gcc_assert (result);
2527
2528 /* Move target1 and target2 to the memory locations indicated
2529 by op1 and op2. */
2530 emit_move_insn (op1, target1);
2531 emit_move_insn (op2, target2);
2532
2533 return const0_rtx;
2534 }
2535
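/* Illustrative source-level view (hypothetical user code): for

       double s, c;
       sincos (x, &s, &c);

   the expander computes both values with a single sincos_optab insn
   into two fresh registers and then stores them through the SINP and
   COSP pointers; the builtin's own value is void, hence the const0_rtx
   return. */
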
2536 /* Expand a call to the internal cexpi builtin to the sincos math function.
2537 EXP is the expression that is a call to the builtin function; if convenient,
2538 the result should be placed in TARGET. */
2539
2540 static rtx
2541 expand_builtin_cexpi (tree exp, rtx target)
2542 {
2543 tree fndecl = get_callee_fndecl (exp);
2544 tree arg, type;
2545 machine_mode mode;
2546 rtx op0, op1, op2;
2547 location_t loc = EXPR_LOCATION (exp);
2548
2549 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2550 return NULL_RTX;
2551
2552 arg = CALL_EXPR_ARG (exp, 0);
2553 type = TREE_TYPE (arg);
2554 mode = TYPE_MODE (TREE_TYPE (arg));
2555
2556 /* Try expanding via a sincos optab; fall back to emitting a libcall
2557 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2558 is only generated from sincos or cexp, or when either is available. */
2559 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2560 {
2561 op1 = gen_reg_rtx (mode);
2562 op2 = gen_reg_rtx (mode);
2563
2564 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2565
2566 /* Compute into op1 and op2. */
2567 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2568 }
2569 else if (targetm.libc_has_function (function_sincos))
2570 {
2571 tree call, fn = NULL_TREE;
2572 tree top1, top2;
2573 rtx op1a, op2a;
2574
2575 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2576 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2577 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2578 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2579 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2580 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2581 else
2582 gcc_unreachable ();
2583
2584 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2585 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2586 op1a = copy_addr_to_reg (XEXP (op1, 0));
2587 op2a = copy_addr_to_reg (XEXP (op2, 0));
2588 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2589 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2590
2591 /* Make sure not to fold the sincos call again. */
2592 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2593 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2594 call, 3, arg, top1, top2));
2595 }
2596 else
2597 {
2598 tree call, fn = NULL_TREE, narg;
2599 tree ctype = build_complex_type (type);
2600
2601 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2602 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2603 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2604 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2605 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2606 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2607 else
2608 gcc_unreachable ();
2609
2610 /* If we don't have a decl for cexp, create one. This is the
2611 friendliest fallback if the user calls __builtin_cexpi
2612 when the target lacks full C99 function support. */
2613 if (fn == NULL_TREE)
2614 {
2615 tree fntype;
2616 const char *name = NULL;
2617
2618 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2619 name = "cexpf";
2620 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2621 name = "cexp";
2622 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2623 name = "cexpl";
2624
2625 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2626 fn = build_fn_decl (name, fntype);
2627 }
2628
2629 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2630 build_real (type, dconst0), arg);
2631
2632 /* Make sure not to fold the cexp call again. */
2633 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2634 return expand_expr (build_call_nary (ctype, call, 1, narg),
2635 target, VOIDmode, EXPAND_NORMAL);
2636 }
2637
2638 /* Now build the proper return type. */
2639 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2640 make_tree (TREE_TYPE (arg), op2),
2641 make_tree (TREE_TYPE (arg), op1)),
2642 target, VOIDmode, EXPAND_NORMAL);
2643 }
2644
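/* All three strategies above compute cexpi (x) == cos (x) + i*sin (x):
   directly via the sincos optab, via a libcall to sincos writing
   through two temporaries, or via cexp applied to the complex value
   0 + x*i built with COMPLEX_EXPR, so that conceptually
   __builtin_cexpi (x) becomes cexp (0 + x*i) in the last fallback. */
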
2645 /* Conveniently construct a function call expression. FNDECL names the
2646 function to be called, N is the number of arguments, and the "..."
2647 parameters are the argument expressions. Unlike build_call_expr,
2648 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2649
2650 static tree
2651 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2652 {
2653 va_list ap;
2654 tree fntype = TREE_TYPE (fndecl);
2655 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2656
2657 va_start (ap, n);
2658 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2659 va_end (ap);
2660 SET_EXPR_LOCATION (fn, loc);
2661 return fn;
2662 }
2663
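/* Typical use, as in the expanders below: given the declaration of
   memcpy in FN,

       build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3, dest, src, len)

   yields a plain CALL_EXPR for memcpy (dest, src, len) that can be
   passed to expand_expr without being folded back into a builtin. */
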
2664 /* Expand a call to one of the builtin rounding functions gcc defines
2665 as an extension (lfloor and lceil). As these are gcc extensions we
2666 do not need to worry about setting errno to EDOM.
2667 If expanding via the optab fails, lower the expression to (int)(floor(x)).
2668 EXP is the expression that is a call to the builtin function;
2669 if convenient, the result should be placed in TARGET. */
2670
2671 static rtx
2672 expand_builtin_int_roundingfn (tree exp, rtx target)
2673 {
2674 convert_optab builtin_optab;
2675 rtx op0, tmp;
2676 rtx_insn *insns;
2677 tree fndecl = get_callee_fndecl (exp);
2678 enum built_in_function fallback_fn;
2679 tree fallback_fndecl;
2680 machine_mode mode;
2681 tree arg;
2682
2683 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2684 gcc_unreachable ();
2685
2686 arg = CALL_EXPR_ARG (exp, 0);
2687
2688 switch (DECL_FUNCTION_CODE (fndecl))
2689 {
2690 CASE_FLT_FN (BUILT_IN_ICEIL):
2691 CASE_FLT_FN (BUILT_IN_LCEIL):
2692 CASE_FLT_FN (BUILT_IN_LLCEIL):
2693 builtin_optab = lceil_optab;
2694 fallback_fn = BUILT_IN_CEIL;
2695 break;
2696
2697 CASE_FLT_FN (BUILT_IN_IFLOOR):
2698 CASE_FLT_FN (BUILT_IN_LFLOOR):
2699 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2700 builtin_optab = lfloor_optab;
2701 fallback_fn = BUILT_IN_FLOOR;
2702 break;
2703
2704 default:
2705 gcc_unreachable ();
2706 }
2707
2708 /* Make a suitable register to place result in. */
2709 mode = TYPE_MODE (TREE_TYPE (exp));
2710
2711 target = gen_reg_rtx (mode);
2712
2713 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2714 need to expand the argument again. This way, we will not perform
2715 side effects more than once. */
2716 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2717
2718 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2719
2720 start_sequence ();
2721
2722 /* Compute into TARGET. */
2723 if (expand_sfix_optab (target, op0, builtin_optab))
2724 {
2725 /* Output the entire sequence. */
2726 insns = get_insns ();
2727 end_sequence ();
2728 emit_insn (insns);
2729 return target;
2730 }
2731
2732 /* If we were unable to expand via the builtin, stop the sequence
2733 (without outputting the insns). */
2734 end_sequence ();
2735
2736 /* Fall back to floating point rounding optab. */
2737 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2738
2739 /* For non-C99 targets we may end up without a fallback fndecl here
2740 if the user called __builtin_lfloor directly. In this case emit
2741 a call to the floor/ceil variants nevertheless. This should result
2742 in the best user experience for targets without full C99 support. */
2743 if (fallback_fndecl == NULL_TREE)
2744 {
2745 tree fntype;
2746 const char *name = NULL;
2747
2748 switch (DECL_FUNCTION_CODE (fndecl))
2749 {
2750 case BUILT_IN_ICEIL:
2751 case BUILT_IN_LCEIL:
2752 case BUILT_IN_LLCEIL:
2753 name = "ceil";
2754 break;
2755 case BUILT_IN_ICEILF:
2756 case BUILT_IN_LCEILF:
2757 case BUILT_IN_LLCEILF:
2758 name = "ceilf";
2759 break;
2760 case BUILT_IN_ICEILL:
2761 case BUILT_IN_LCEILL:
2762 case BUILT_IN_LLCEILL:
2763 name = "ceill";
2764 break;
2765 case BUILT_IN_IFLOOR:
2766 case BUILT_IN_LFLOOR:
2767 case BUILT_IN_LLFLOOR:
2768 name = "floor";
2769 break;
2770 case BUILT_IN_IFLOORF:
2771 case BUILT_IN_LFLOORF:
2772 case BUILT_IN_LLFLOORF:
2773 name = "floorf";
2774 break;
2775 case BUILT_IN_IFLOORL:
2776 case BUILT_IN_LFLOORL:
2777 case BUILT_IN_LLFLOORL:
2778 name = "floorl";
2779 break;
2780 default:
2781 gcc_unreachable ();
2782 }
2783
2784 fntype = build_function_type_list (TREE_TYPE (arg),
2785 TREE_TYPE (arg), NULL_TREE);
2786 fallback_fndecl = build_fn_decl (name, fntype);
2787 }
2788
2789 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2790
2791 tmp = expand_normal (exp);
2792 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2793
2794 /* Truncate the result of floating point optab to integer
2795 via expand_fix (). */
2796 target = gen_reg_rtx (mode);
2797 expand_fix (target, tmp, 0);
2798
2799 return target;
2800 }
2801
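/* Conceptually (an illustrative sketch): when the target provides no
   lceil/lfloor pattern, a call such as lfloor (x) is lowered here to
   (long) floor (x) -- a call to the floor fallback followed by
   expand_fix on its result. */
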
2802 /* Expand a call to one of the builtin math functions doing integer
2803 conversion (lrint).
2804 Return 0 if a normal call should be emitted rather than expanding the
2805 function in-line. EXP is the expression that is a call to the builtin
2806 function; if convenient, the result should be placed in TARGET. */
2807
2808 static rtx
2809 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2810 {
2811 convert_optab builtin_optab;
2812 rtx op0;
2813 rtx_insn *insns;
2814 tree fndecl = get_callee_fndecl (exp);
2815 tree arg;
2816 machine_mode mode;
2817 enum built_in_function fallback_fn = BUILT_IN_NONE;
2818
2819 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2820 gcc_unreachable ();
2821
2822 arg = CALL_EXPR_ARG (exp, 0);
2823
2824 switch (DECL_FUNCTION_CODE (fndecl))
2825 {
2826 CASE_FLT_FN (BUILT_IN_IRINT):
2827 fallback_fn = BUILT_IN_LRINT;
2828 /* FALLTHRU */
2829 CASE_FLT_FN (BUILT_IN_LRINT):
2830 CASE_FLT_FN (BUILT_IN_LLRINT):
2831 builtin_optab = lrint_optab;
2832 break;
2833
2834 CASE_FLT_FN (BUILT_IN_IROUND):
2835 fallback_fn = BUILT_IN_LROUND;
2836 /* FALLTHRU */
2837 CASE_FLT_FN (BUILT_IN_LROUND):
2838 CASE_FLT_FN (BUILT_IN_LLROUND):
2839 builtin_optab = lround_optab;
2840 break;
2841
2842 default:
2843 gcc_unreachable ();
2844 }
2845
2846 /* There's no easy way to detect the case where we need to set EDOM. */
2847 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2848 return NULL_RTX;
2849
2850 /* Make a suitable register to place result in. */
2851 mode = TYPE_MODE (TREE_TYPE (exp));
2852
2853 /* There's no easy way to detect the case where we need to set EDOM. */
2854 if (!flag_errno_math)
2855 {
2856 rtx result = gen_reg_rtx (mode);
2857
2858 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2859 need to expand the argument again. This way, we will not perform
2860 side effects more than once. */
2861 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2862
2863 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2864
2865 start_sequence ();
2866
2867 if (expand_sfix_optab (result, op0, builtin_optab))
2868 {
2869 /* Output the entire sequence. */
2870 insns = get_insns ();
2871 end_sequence ();
2872 emit_insn (insns);
2873 return result;
2874 }
2875
2876 /* If we were unable to expand via the builtin, stop the sequence
2877 (without outputting the insns) and call the library function
2878 with the stabilized argument list. */
2879 end_sequence ();
2880 }
2881
2882 if (fallback_fn != BUILT_IN_NONE)
2883 {
2884 /* Fall back to rounding to long int. Use implicit_p 0: for non-C99
2885 targets, (int) round (x) should never be transformed into
2886 BUILT_IN_IROUND, and if __builtin_iround is called directly, emit
2887 a call to lround in the hope that the target provides at least some
2888 C99 functions. This should result in the best user experience for
2889 targets without full C99 support. */
2890 tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
2891 fallback_fn, 0);
2892
2893 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2894 fallback_fndecl, 1, arg);
2895
2896 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2897 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2898 return convert_to_mode (mode, target, 0);
2899 }
2900
2901 return expand_call (exp, target, target == const0_rtx);
2902 }
2903
2904 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2905 a normal call should be emitted rather than expanding the function
2906 in-line. EXP is the expression that is a call to the builtin
2907 function; if convenient, the result should be placed in TARGET. */
2908
2909 static rtx
2910 expand_builtin_powi (tree exp, rtx target)
2911 {
2912 tree arg0, arg1;
2913 rtx op0, op1;
2914 machine_mode mode;
2915 machine_mode mode2;
2916
2917 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2918 return NULL_RTX;
2919
2920 arg0 = CALL_EXPR_ARG (exp, 0);
2921 arg1 = CALL_EXPR_ARG (exp, 1);
2922 mode = TYPE_MODE (TREE_TYPE (exp));
2923
2924 /* Emit a libcall to libgcc. */
2925
2926 /* Mode of the 2nd argument must match that of an int. */
2927 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2928
2929 if (target == NULL_RTX)
2930 target = gen_reg_rtx (mode);
2931
2932 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2933 if (GET_MODE (op0) != mode)
2934 op0 = convert_to_mode (mode, op0, 0);
2935 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2936 if (GET_MODE (op1) != mode2)
2937 op1 = convert_to_mode (mode2, op1, 0);
2938
2939 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2940 target, LCT_CONST, mode, 2,
2941 op0, mode, op1, mode2);
2942
2943 return target;
2944 }
2945
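/* For example, assuming the usual libgcc naming, a DFmode call
   __builtin_powi (x, n) becomes a libcall to __powidf2 with X passed
   in DFmode and N converted to the target's int mode. */
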
2946 /* Expand expression EXP which is a call to the strlen builtin. Return
2947 NULL_RTX if we failed; in that case the caller should emit a normal
2948 call. Otherwise try to get the result in TARGET, if convenient. */
2949
2950 static rtx
2951 expand_builtin_strlen (tree exp, rtx target,
2952 machine_mode target_mode)
2953 {
2954 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2955 return NULL_RTX;
2956 else
2957 {
2958 struct expand_operand ops[4];
2959 rtx pat;
2960 tree len;
2961 tree src = CALL_EXPR_ARG (exp, 0);
2962 rtx src_reg;
2963 rtx_insn *before_strlen;
2964 machine_mode insn_mode = target_mode;
2965 enum insn_code icode = CODE_FOR_nothing;
2966 unsigned int align;
2967
2968 /* If the length can be computed at compile-time, return it. */
2969 len = c_strlen (src, 0);
2970 if (len)
2971 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2972
2973 /* If the length can be computed at compile-time and is a constant
2974 integer, but there are side effects in src, evaluate
2975 src for side-effects, then return len.
2976 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2977 can be optimized into: i++; x = 3; */
2978 len = c_strlen (src, 1);
2979 if (len && TREE_CODE (len) == INTEGER_CST)
2980 {
2981 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2982 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2983 }
2984
2985 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2986
2987 /* If SRC is not a pointer type, don't do this operation inline. */
2988 if (align == 0)
2989 return NULL_RTX;
2990
2991 /* Bail out if we can't compute strlen in the right mode. */
2992 while (insn_mode != VOIDmode)
2993 {
2994 icode = optab_handler (strlen_optab, insn_mode);
2995 if (icode != CODE_FOR_nothing)
2996 break;
2997
2998 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
2999 }
3000 if (insn_mode == VOIDmode)
3001 return NULL_RTX;
3002
3003 /* Make a place to hold the source address. We will not expand
3004 the actual source until we are sure that the expansion will
3005 not fail -- there are trees that cannot be expanded twice. */
3006 src_reg = gen_reg_rtx (Pmode);
3007
3008 /* Mark the beginning of the strlen sequence so we can emit the
3009 source operand later. */
3010 before_strlen = get_last_insn ();
3011
3012 create_output_operand (&ops[0], target, insn_mode);
3013 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3014 create_integer_operand (&ops[2], 0);
3015 create_integer_operand (&ops[3], align);
3016 if (!maybe_expand_insn (icode, 4, ops))
3017 return NULL_RTX;
3018
3019 /* Now that we are assured of success, expand the source. */
3020 start_sequence ();
3021 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3022 if (pat != src_reg)
3023 {
3024 #ifdef POINTERS_EXTEND_UNSIGNED
3025 if (GET_MODE (pat) != Pmode)
3026 pat = convert_to_mode (Pmode, pat,
3027 POINTERS_EXTEND_UNSIGNED);
3028 #endif
3029 emit_move_insn (src_reg, pat);
3030 }
3031 pat = get_insns ();
3032 end_sequence ();
3033
3034 if (before_strlen)
3035 emit_insn_after (pat, before_strlen);
3036 else
3037 emit_insn_before (pat, get_insns ());
3038
3039 /* Return the value in the proper mode for this function. */
3040 if (GET_MODE (ops[0].value) == target_mode)
3041 target = ops[0].value;
3042 else if (target != 0)
3043 convert_move (target, ops[0].value, 0);
3044 else
3045 target = convert_to_mode (target_mode, ops[0].value, 0);
3046
3047 return target;
3048 }
3049 }
3050
3051 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3052 bytes from the constant string DATA + OFFSET and return it as a target
3053 constant. */
3054
3055 static rtx
3056 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3057 machine_mode mode)
3058 {
3059 const char *str = (const char *) data;
3060
3061 gcc_assert (offset >= 0
3062 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3063 <= strlen (str) + 1));
3064
3065 return c_readstr (str + offset, mode);
3066 }
3067
3068 /* LEN specifies the length of the block for a memcpy/memset operation.
3069 Figure out its range and store it in MIN_SIZE/MAX_SIZE.
3070 In some cases we can make a very likely guess at the maximum size,
3071 which we then store in PROBABLE_MAX_SIZE. */
3072
3073 static void
3074 determine_block_size (tree len, rtx len_rtx,
3075 unsigned HOST_WIDE_INT *min_size,
3076 unsigned HOST_WIDE_INT *max_size,
3077 unsigned HOST_WIDE_INT *probable_max_size)
3078 {
3079 if (CONST_INT_P (len_rtx))
3080 {
3081 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3082 return;
3083 }
3084 else
3085 {
3086 wide_int min, max;
3087 enum value_range_type range_type = VR_UNDEFINED;
3088
3089 /* Determine bounds from the type. */
3090 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3091 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3092 else
3093 *min_size = 0;
3094 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3095 *probable_max_size = *max_size
3096 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3097 else
3098 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3099
3100 if (TREE_CODE (len) == SSA_NAME)
3101 range_type = get_range_info (len, &min, &max);
3102 if (range_type == VR_RANGE)
3103 {
3104 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3105 *min_size = min.to_uhwi ();
3106 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3107 *probable_max_size = *max_size = max.to_uhwi ();
3108 }
3109 else if (range_type == VR_ANTI_RANGE)
3110 {
3111 /* An anti-range 0...N lets us determine that the minimal size is N+1. */
3112 if (min == 0)
3113 {
3114 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3115 *min_size = max.to_uhwi () + 1;
3116 }
3117 /* Code like
3118
3119 int n;
3120 if (n < 100)
3121 memcpy (a, b, n)
3122
3123 produces an anti-range allowing negative values of N. We can
3124 still use this information to guess that N is not negative.
3125 */
3126 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3127 *probable_max_size = min.to_uhwi () - 1;
3128 }
3129 }
3130 gcc_checking_assert (*max_size <=
3131 (unsigned HOST_WIDE_INT)
3132 GET_MODE_MASK (GET_MODE (len_rtx)));
3133 }
3134
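/* A worked example (assumed range info): for memcpy (a, b, n) where
   value ranges prove n is in [8, 128], LEN_RTX is not constant, so the
   SSA_NAME path sets *MIN_SIZE to 8 and both *MAX_SIZE and
   *PROBABLE_MAX_SIZE to 128, which lets the block-move expansion pick a
   bounded inline sequence instead of an unconditional library call. */
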
3135 /* Helper function to do the actual work for expand_builtin_memcpy. */
3136
3137 static rtx
3138 expand_builtin_memcpy_args (tree dest, tree src, tree len, rtx target, tree exp)
3139 {
3140 const char *src_str;
3141 unsigned int src_align = get_pointer_alignment (src);
3142 unsigned int dest_align = get_pointer_alignment (dest);
3143 rtx dest_mem, src_mem, dest_addr, len_rtx;
3144 HOST_WIDE_INT expected_size = -1;
3145 unsigned int expected_align = 0;
3146 unsigned HOST_WIDE_INT min_size;
3147 unsigned HOST_WIDE_INT max_size;
3148 unsigned HOST_WIDE_INT probable_max_size;
3149
3150 /* If DEST is not a pointer type, call the normal function. */
3151 if (dest_align == 0)
3152 return NULL_RTX;
3153
3154 /* If SRC is not a pointer type, don't do this
3155 operation in-line. */
3156 if (src_align == 0)
3157 return NULL_RTX;
3158
3159 if (currently_expanding_gimple_stmt)
3160 stringop_block_profile (currently_expanding_gimple_stmt,
3161 &expected_align, &expected_size);
3162
3163 if (expected_align < dest_align)
3164 expected_align = dest_align;
3165 dest_mem = get_memory_rtx (dest, len);
3166 set_mem_align (dest_mem, dest_align);
3167 len_rtx = expand_normal (len);
3168 determine_block_size (len, len_rtx, &min_size, &max_size,
3169 &probable_max_size);
3170 src_str = c_getstr (src);
3171
3172 /* If SRC is a string constant and block move would be done
3173 by pieces, we can avoid loading the string from memory
3174 and only store the computed constants. */
3175 if (src_str
3176 && CONST_INT_P (len_rtx)
3177 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3178 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3179 CONST_CAST (char *, src_str),
3180 dest_align, false))
3181 {
3182 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3183 builtin_memcpy_read_str,
3184 CONST_CAST (char *, src_str),
3185 dest_align, false, 0);
3186 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3187 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3188 return dest_mem;
3189 }
3190
3191 src_mem = get_memory_rtx (src, len);
3192 set_mem_align (src_mem, src_align);
3193
3194 /* Copy word part most expediently. */
3195 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3196 CALL_EXPR_TAILCALL (exp)
3197 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3198 expected_align, expected_size,
3199 min_size, max_size, probable_max_size);
3200
3201 if (dest_addr == 0)
3202 {
3203 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3204 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3205 }
3206
3207 return dest_addr;
3208 }
3209
3210 /* Expand a call EXP to the memcpy builtin.
3211 Return NULL_RTX if we failed; the caller should emit a normal call,
3212 otherwise try to get the result in TARGET, if convenient (and in
3213 mode MODE if that's convenient). */
3214
3215 static rtx
3216 expand_builtin_memcpy (tree exp, rtx target)
3217 {
3218 if (!validate_arglist (exp,
3219 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3220 return NULL_RTX;
3221 else
3222 {
3223 tree dest = CALL_EXPR_ARG (exp, 0);
3224 tree src = CALL_EXPR_ARG (exp, 1);
3225 tree len = CALL_EXPR_ARG (exp, 2);
3226 return expand_builtin_memcpy_args (dest, src, len, target, exp);
3227 }
3228 }
3229
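/* For instance, a hypothetical call memcpy (buf, "hi", 3) takes the
   store_by_pieces path in expand_builtin_memcpy_args, provided
   can_store_by_pieces agrees for the target: LEN_RTX is the constant 3
   and c_getstr recovers "hi", so the bytes 'h', 'i' and '\0' are
   stored directly without ever loading from the source string. */
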
3230 /* Expand an instrumented call EXP to the memcpy builtin.
3231 Return NULL_RTX if we failed; the caller should emit a normal call,
3232 otherwise try to get the result in TARGET, if convenient (and in
3233 mode MODE if that's convenient). */
3234
3235 static rtx
3236 expand_builtin_memcpy_with_bounds (tree exp, rtx target)
3237 {
3238 if (!validate_arglist (exp,
3239 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3240 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3241 INTEGER_TYPE, VOID_TYPE))
3242 return NULL_RTX;
3243 else
3244 {
3245 tree dest = CALL_EXPR_ARG (exp, 0);
3246 tree src = CALL_EXPR_ARG (exp, 2);
3247 tree len = CALL_EXPR_ARG (exp, 4);
3248 rtx res = expand_builtin_memcpy_args (dest, src, len, target, exp);
3249
3250 /* Return src bounds with the result. */
3251 if (res)
3252 {
3253 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3254 expand_normal (CALL_EXPR_ARG (exp, 1)));
3255 res = chkp_join_splitted_slot (res, bnd);
3256 }
3257 return res;
3258 }
3259 }
3260
3261 /* Expand a call EXP to the mempcpy builtin.
3262 Return NULL_RTX if we failed; the caller should emit a normal call,
3263 otherwise try to get the result in TARGET, if convenient (and in
3264 mode MODE if that's convenient). If ENDP is 0 return the
3265 destination pointer, if ENDP is 1 return the end pointer ala
3266 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3267 stpcpy. */
3268
3269 static rtx
3270 expand_builtin_mempcpy (tree exp, rtx target, machine_mode mode)
3271 {
3272 if (!validate_arglist (exp,
3273 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3274 return NULL_RTX;
3275 else
3276 {
3277 tree dest = CALL_EXPR_ARG (exp, 0);
3278 tree src = CALL_EXPR_ARG (exp, 1);
3279 tree len = CALL_EXPR_ARG (exp, 2);
3280 return expand_builtin_mempcpy_args (dest, src, len,
3281 target, mode, /*endp=*/ 1,
3282 exp);
3283 }
3284 }
3285
3286 /* Expand an instrumented call EXP to the mempcpy builtin.
3287 Return NULL_RTX if we failed; the caller should emit a normal call,
3288 otherwise try to get the result in TARGET, if convenient (and in
3289 mode MODE if that's convenient). */
3290
3291 static rtx
3292 expand_builtin_mempcpy_with_bounds (tree exp, rtx target, machine_mode mode)
3293 {
3294 if (!validate_arglist (exp,
3295 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3296 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3297 INTEGER_TYPE, VOID_TYPE))
3298 return NULL_RTX;
3299 else
3300 {
3301 tree dest = CALL_EXPR_ARG (exp, 0);
3302 tree src = CALL_EXPR_ARG (exp, 2);
3303 tree len = CALL_EXPR_ARG (exp, 4);
3304 rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
3305 mode, 1, exp);
3306
3307 /* Return src bounds with the result. */
3308 if (res)
3309 {
3310 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3311 expand_normal (CALL_EXPR_ARG (exp, 1)));
3312 res = chkp_join_splitted_slot (res, bnd);
3313 }
3314 return res;
3315 }
3316 }
3317
3318 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3319 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3320 so that this can also be called without constructing an actual CALL_EXPR.
3321 The other arguments and return value are the same as for
3322 expand_builtin_mempcpy. */
3323
3324 static rtx
3325 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3326 rtx target, machine_mode mode, int endp,
3327 tree orig_exp)
3328 {
3329 tree fndecl = get_callee_fndecl (orig_exp);
3330
3331 /* If return value is ignored, transform mempcpy into memcpy. */
3332 if (target == const0_rtx
3333 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
3334 && builtin_decl_implicit_p (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP))
3335 {
3336 tree fn = builtin_decl_implicit (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP);
3337 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3338 dest, src, len);
3339 return expand_expr (result, target, mode, EXPAND_NORMAL);
3340 }
3341 else if (target == const0_rtx
3342 && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3343 {
3344 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3345 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3346 dest, src, len);
3347 return expand_expr (result, target, mode, EXPAND_NORMAL);
3348 }
3349 else
3350 {
3351 const char *src_str;
3352 unsigned int src_align = get_pointer_alignment (src);
3353 unsigned int dest_align = get_pointer_alignment (dest);
3354 rtx dest_mem, src_mem, len_rtx;
3355
3356 /* If either SRC or DEST is not a pointer type, don't do this
3357 operation in-line. */
3358 if (dest_align == 0 || src_align == 0)
3359 return NULL_RTX;
3360
3361 /* If LEN is not constant, call the normal function. */
3362 if (! tree_fits_uhwi_p (len))
3363 return NULL_RTX;
3364
3365 len_rtx = expand_normal (len);
3366 src_str = c_getstr (src);
3367
3368 /* If SRC is a string constant and block move would be done
3369 by pieces, we can avoid loading the string from memory
3370 and need only store the computed constants. */
3371 if (src_str
3372 && CONST_INT_P (len_rtx)
3373 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3374 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3375 CONST_CAST (char *, src_str),
3376 dest_align, false))
3377 {
3378 dest_mem = get_memory_rtx (dest, len);
3379 set_mem_align (dest_mem, dest_align);
3380 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3381 builtin_memcpy_read_str,
3382 CONST_CAST (char *, src_str),
3383 dest_align, false, endp);
3384 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3385 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3386 return dest_mem;
3387 }
3388
3389 if (CONST_INT_P (len_rtx)
3390 && can_move_by_pieces (INTVAL (len_rtx),
3391 MIN (dest_align, src_align)))
3392 {
3393 dest_mem = get_memory_rtx (dest, len);
3394 set_mem_align (dest_mem, dest_align);
3395 src_mem = get_memory_rtx (src, len);
3396 set_mem_align (src_mem, src_align);
3397 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3398 MIN (dest_align, src_align), endp);
3399 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3400 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3401 return dest_mem;
3402 }
3403
3404 return NULL_RTX;
3405 }
3406 }
3407
3408 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3409 we failed; the caller should then emit a normal call. Otherwise try to
3410 get the result in TARGET, if convenient. If ENDP is 0 return the
3411 destination pointer, if ENDP is 1 return the end pointer ala
3412 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3413 stpcpy. */
3414
3415 static rtx
3416 expand_movstr (tree dest, tree src, rtx target, int endp)
3417 {
3418 struct expand_operand ops[3];
3419 rtx dest_mem;
3420 rtx src_mem;
3421
3422 if (!targetm.have_movstr ())
3423 return NULL_RTX;
3424
3425 dest_mem = get_memory_rtx (dest, NULL);
3426 src_mem = get_memory_rtx (src, NULL);
3427 if (!endp)
3428 {
3429 target = force_reg (Pmode, XEXP (dest_mem, 0));
3430 dest_mem = replace_equiv_address (dest_mem, target);
3431 }
3432
3433 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3434 create_fixed_operand (&ops[1], dest_mem);
3435 create_fixed_operand (&ops[2], src_mem);
3436 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3437 return NULL_RTX;
3438
3439 if (endp && target != const0_rtx)
3440 {
3441 target = ops[0].value;
3442 /* movstr is supposed to set end to the address of the NUL
3443 terminator. If the caller requested a mempcpy-like return value,
3444 adjust it. */
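/* E.g. after copying "abc" the insn leaves DEST + 3, the address of
   the NUL, in the output; the mempcpy-style value is DEST + 4, hence
   the increment below.  */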
3445 if (endp == 1)
3446 {
3447 rtx tem = plus_constant (GET_MODE (target),
3448 gen_lowpart (GET_MODE (target), target), 1);
3449 emit_move_insn (target, force_operand (tem, NULL_RTX));
3450 }
3451 }
3452 return target;
3453 }
3454
3455 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3456 NULL_RTX if we failed; the caller should then emit a normal call.
3457 Otherwise try to get the result in TARGET, if
3458 convenient. */
3459
3460 static rtx
3461 expand_builtin_strcpy (tree exp, rtx target)
3462 {
3463 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3464 {
3465 tree dest = CALL_EXPR_ARG (exp, 0);
3466 tree src = CALL_EXPR_ARG (exp, 1);
3467 return expand_builtin_strcpy_args (dest, src, target);
3468 }
3469 return NULL_RTX;
3470 }
3471
3472 /* Helper function to do the actual work for expand_builtin_strcpy. The
3473 arguments to the builtin_strcpy call DEST and SRC are broken out
3474 so that this can also be called without constructing an actual CALL_EXPR.
3475 The other arguments and return value are the same as for
3476 expand_builtin_strcpy. */
3477
3478 static rtx
3479 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3480 {
3481 return expand_movstr (dest, src, target, /*endp=*/0);
3482 }
3483
3484 /* Expand a call EXP to the stpcpy builtin.
3485 Return NULL_RTX if we failed; the caller should then emit a normal call,
3486 otherwise try to get the result in TARGET, if convenient (and in
3487 mode MODE if that's convenient). */
3488
3489 static rtx
3490 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3491 {
3492 tree dst, src;
3493 location_t loc = EXPR_LOCATION (exp);
3494
3495 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3496 return NULL_RTX;
3497
3498 dst = CALL_EXPR_ARG (exp, 0);
3499 src = CALL_EXPR_ARG (exp, 1);
3500
3501 /* If return value is ignored, transform stpcpy into strcpy. */
3502 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3503 {
3504 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3505 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3506 return expand_expr (result, target, mode, EXPAND_NORMAL);
3507 }
3508 else
3509 {
3510 tree len, lenp1;
3511 rtx ret;
3512
3513 /* Ensure we get an actual string whose length can be evaluated at
3514 compile-time, not an expression containing a string. This is
3515 because the latter may produce pessimized code
3516 when used to compute the return value. */
3517 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3518 return expand_movstr (dst, src, target, /*endp=*/2);
3519
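/* A worked example: for SRC == "abc", LEN is 3 and LENP1 is 4, so the
   mempcpy-style expansion below also copies the terminating NUL and,
   with ENDP == 2, hands back DST + 3, the address of that NUL.  */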
3520 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3521 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3522 target, mode, /*endp=*/2,
3523 exp);
3524
3525 if (ret)
3526 return ret;
3527
3528 if (TREE_CODE (len) == INTEGER_CST)
3529 {
3530 rtx len_rtx = expand_normal (len);
3531
3532 if (CONST_INT_P (len_rtx))
3533 {
3534 ret = expand_builtin_strcpy_args (dst, src, target);
3535
3536 if (ret)
3537 {
3538 if (! target)
3539 {
3540 if (mode != VOIDmode)
3541 target = gen_reg_rtx (mode);
3542 else
3543 target = gen_reg_rtx (GET_MODE (ret));
3544 }
3545 if (GET_MODE (target) != GET_MODE (ret))
3546 ret = gen_lowpart (GET_MODE (target), ret);
3547
3548 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3549 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3550 gcc_assert (ret);
3551
3552 return target;
3553 }
3554 }
3555 }
3556
3557 return expand_movstr (dst, src, target, /*endp=*/2);
3558 }
3559 }
3560
3561 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3562 bytes from constant string DATA + OFFSET and return it as target
3563 constant. */
3564
3565 rtx
3566 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3567 machine_mode mode)
3568 {
3569 const char *str = (const char *) data;
3570
3571 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3572 return const0_rtx;
3573
3574 return c_readstr (str + offset, mode);
3575 }
3576
3577 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3578 NULL_RTX if we failed; the caller should then emit a normal call. */
3579
3580 static rtx
3581 expand_builtin_strncpy (tree exp, rtx target)
3582 {
3583 location_t loc = EXPR_LOCATION (exp);
3584
3585 if (validate_arglist (exp,
3586 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3587 {
3588 tree dest = CALL_EXPR_ARG (exp, 0);
3589 tree src = CALL_EXPR_ARG (exp, 1);
3590 tree len = CALL_EXPR_ARG (exp, 2);
3591 tree slen = c_strlen (src, 1);
3592
3593 /* We must be passed a constant len and src parameter. */
3594 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3595 return NULL_RTX;
3596
3597 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3598
3599 /* We're required to pad with trailing zeros if the requested
3600 len is greater than strlen(s2)+1. In that case try to
3601 use store_by_pieces; if that fails, punt. */
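/* For example, strncpy (d, "ab", 5) must store 'a', 'b' and then three
   NUL bytes; builtin_strncpy_read_str supplies zeros for offsets past
   the end of SRC, so store_by_pieces can emit the whole padded store.  */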
3602 if (tree_int_cst_lt (slen, len))
3603 {
3604 unsigned int dest_align = get_pointer_alignment (dest);
3605 const char *p = c_getstr (src);
3606 rtx dest_mem;
3607
3608 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3609 || !can_store_by_pieces (tree_to_uhwi (len),
3610 builtin_strncpy_read_str,
3611 CONST_CAST (char *, p),
3612 dest_align, false))
3613 return NULL_RTX;
3614
3615 dest_mem = get_memory_rtx (dest, len);
3616 store_by_pieces (dest_mem, tree_to_uhwi (len),
3617 builtin_strncpy_read_str,
3618 CONST_CAST (char *, p), dest_align, false, 0);
3619 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3620 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3621 return dest_mem;
3622 }
3623 }
3624 return NULL_RTX;
3625 }
3626
3627 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3628 bytes from constant string DATA + OFFSET and return it as target
3629 constant. */
3630
3631 rtx
3632 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3633 machine_mode mode)
3634 {
3635 const char *c = (const char *) data;
3636 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3637
3638 memset (p, *c, GET_MODE_SIZE (mode));
3639
3640 return c_readstr (p, mode);
3641 }
3642
3643 /* Callback routine for store_by_pieces. Return the RTL of a register
3644 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3645 char value given in the RTL register data. For example, if mode is
3646 4 bytes wide, return the RTL for 0x01010101*data. */
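/* Concretely, the multiplication replicates the byte: for a DATA value
   of 0xAB in a 4-byte mode, 0xAB * 0x01010101 == 0xABABABAB.  */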
3647
3648 static rtx
3649 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3650 machine_mode mode)
3651 {
3652 rtx target, coeff;
3653 size_t size;
3654 char *p;
3655
3656 size = GET_MODE_SIZE (mode);
3657 if (size == 1)
3658 return (rtx) data;
3659
3660 p = XALLOCAVEC (char, size);
3661 memset (p, 1, size);
3662 coeff = c_readstr (p, mode);
3663
3664 target = convert_to_mode (mode, (rtx) data, 1);
3665 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3666 return force_reg (mode, target);
3667 }
3668
3669 /* Expand expression EXP, which is a call to the memset builtin. Return
3670 NULL_RTX if we failed; the caller should then emit a normal call. Otherwise
3671 try to get the result in TARGET, if convenient (and in mode MODE if that's
3672 convenient). */
3673
3674 static rtx
3675 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
3676 {
3677 if (!validate_arglist (exp,
3678 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3679 return NULL_RTX;
3680 else
3681 {
3682 tree dest = CALL_EXPR_ARG (exp, 0);
3683 tree val = CALL_EXPR_ARG (exp, 1);
3684 tree len = CALL_EXPR_ARG (exp, 2);
3685 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3686 }
3687 }
3688
3689 /* Expand expression EXP, which is an instrumented call to the memset builtin.
3690 Return NULL_RTX if we failed; the caller should then emit a normal call. Otherwise
3691 try to get the result in TARGET, if convenient (and in mode MODE if that's
3692 convenient). */
3693
3694 static rtx
3695 expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
3696 {
3697 if (!validate_arglist (exp,
3698 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3699 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3700 return NULL_RTX;
3701 else
3702 {
3703 tree dest = CALL_EXPR_ARG (exp, 0);
3704 tree val = CALL_EXPR_ARG (exp, 2);
3705 tree len = CALL_EXPR_ARG (exp, 3);
3706 rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
3707
3708 /* Return DEST bounds with the result. */
3709 if (res)
3710 {
3711 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3712 expand_normal (CALL_EXPR_ARG (exp, 1)));
3713 res = chkp_join_splitted_slot (res, bnd);
3714 }
3715 return res;
3716 }
3717 }
3718
3719 /* Helper function to do the actual work for expand_builtin_memset. The
3720 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3721 so that this can also be called without constructing an actual CALL_EXPR.
3722 The other arguments and return value are the same as for
3723 expand_builtin_memset. */
3724
3725 static rtx
3726 expand_builtin_memset_args (tree dest, tree val, tree len,
3727 rtx target, machine_mode mode, tree orig_exp)
3728 {
3729 tree fndecl, fn;
3730 enum built_in_function fcode;
3731 machine_mode val_mode;
3732 char c;
3733 unsigned int dest_align;
3734 rtx dest_mem, dest_addr, len_rtx;
3735 HOST_WIDE_INT expected_size = -1;
3736 unsigned int expected_align = 0;
3737 unsigned HOST_WIDE_INT min_size;
3738 unsigned HOST_WIDE_INT max_size;
3739 unsigned HOST_WIDE_INT probable_max_size;
3740
3741 dest_align = get_pointer_alignment (dest);
3742
3743 /* If DEST is not a pointer type, don't do this operation in-line. */
3744 if (dest_align == 0)
3745 return NULL_RTX;
3746
3747 if (currently_expanding_gimple_stmt)
3748 stringop_block_profile (currently_expanding_gimple_stmt,
3749 &expected_align, &expected_size);
3750
3751 if (expected_align < dest_align)
3752 expected_align = dest_align;
3753
3754 /* If the LEN parameter is zero, return DEST. */
3755 if (integer_zerop (len))
3756 {
3757 /* Evaluate and ignore VAL in case it has side-effects. */
3758 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3759 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3760 }
3761
3762 /* Stabilize the arguments in case we fail. */
3763 dest = builtin_save_expr (dest);
3764 val = builtin_save_expr (val);
3765 len = builtin_save_expr (len);
3766
3767 len_rtx = expand_normal (len);
3768 determine_block_size (len, len_rtx, &min_size, &max_size,
3769 &probable_max_size);
3770 dest_mem = get_memory_rtx (dest, len);
3771 val_mode = TYPE_MODE (unsigned_char_type_node);
3772
3773 if (TREE_CODE (val) != INTEGER_CST)
3774 {
3775 rtx val_rtx;
3776
3777 val_rtx = expand_normal (val);
3778 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3779
3780 /* Assume that we can memset by pieces if we can store
3781 the coefficients by pieces (in the required modes).
3782 We can't pass builtin_memset_gen_str as that emits RTL. */
3783 c = 1;
3784 if (tree_fits_uhwi_p (len)
3785 && can_store_by_pieces (tree_to_uhwi (len),
3786 builtin_memset_read_str, &c, dest_align,
3787 true))
3788 {
3789 val_rtx = force_reg (val_mode, val_rtx);
3790 store_by_pieces (dest_mem, tree_to_uhwi (len),
3791 builtin_memset_gen_str, val_rtx, dest_align,
3792 true, 0);
3793 }
3794 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3795 dest_align, expected_align,
3796 expected_size, min_size, max_size,
3797 probable_max_size))
3798 goto do_libcall;
3799
3800 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3801 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3802 return dest_mem;
3803 }
3804
3805 if (target_char_cast (val, &c))
3806 goto do_libcall;
3807
3808 if (c)
3809 {
3810 if (tree_fits_uhwi_p (len)
3811 && can_store_by_pieces (tree_to_uhwi (len),
3812 builtin_memset_read_str, &c, dest_align,
3813 true))
3814 store_by_pieces (dest_mem, tree_to_uhwi (len),
3815 builtin_memset_read_str, &c, dest_align, true, 0);
3816 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3817 gen_int_mode (c, val_mode),
3818 dest_align, expected_align,
3819 expected_size, min_size, max_size,
3820 probable_max_size))
3821 goto do_libcall;
3822
3823 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3824 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3825 return dest_mem;
3826 }
3827
3828 set_mem_align (dest_mem, dest_align);
3829 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3830 CALL_EXPR_TAILCALL (orig_exp)
3831 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3832 expected_align, expected_size,
3833 min_size, max_size,
3834 probable_max_size);
3835
3836 if (dest_addr == 0)
3837 {
3838 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3839 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3840 }
3841
3842 return dest_addr;
3843
3844 do_libcall:
3845 fndecl = get_callee_fndecl (orig_exp);
3846 fcode = DECL_FUNCTION_CODE (fndecl);
3847 if (fcode == BUILT_IN_MEMSET
3848 || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
3849 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3850 dest, val, len);
3851 else if (fcode == BUILT_IN_BZERO)
3852 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3853 dest, len);
3854 else
3855 gcc_unreachable ();
3856 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3857 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3858 return expand_call (fn, target, target == const0_rtx);
3859 }
3860
3861 /* Expand expression EXP, which is a call to the bzero builtin. Return
3862 NULL_RTX if we failed; the caller should then emit a normal call. */
3863
3864 static rtx
3865 expand_builtin_bzero (tree exp)
3866 {
3867 tree dest, size;
3868 location_t loc = EXPR_LOCATION (exp);
3869
3870 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3871 return NULL_RTX;
3872
3873 dest = CALL_EXPR_ARG (exp, 0);
3874 size = CALL_EXPR_ARG (exp, 1);
3875
3876 /* New argument list transforming bzero(ptr x, int y) to
3877 memset(ptr x, int 0, size_t y). It is done this way
3878 so that if it isn't expanded inline, we fall back to
3879 calling bzero instead of memset. */
3880
3881 return expand_builtin_memset_args (dest, integer_zero_node,
3882 fold_convert_loc (loc,
3883 size_type_node, size),
3884 const0_rtx, VOIDmode, exp);
3885 }
3886
3887 /* Try to expand cmpstr operation ICODE with the given operands.
3888 Return the result rtx on success, otherwise return null. */
3889
3890 static rtx
3891 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
3892 HOST_WIDE_INT align)
3893 {
3894 machine_mode insn_mode = insn_data[icode].operand[0].mode;
3895
3896 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
3897 target = NULL_RTX;
3898
3899 struct expand_operand ops[4];
3900 create_output_operand (&ops[0], target, insn_mode);
3901 create_fixed_operand (&ops[1], arg1_rtx);
3902 create_fixed_operand (&ops[2], arg2_rtx);
3903 create_integer_operand (&ops[3], align);
3904 if (maybe_expand_insn (icode, 4, ops))
3905 return ops[0].value;
3906 return NULL_RTX;
3907 }
3908
3909 /* Try to expand cmpstrn or cmpmem operation ICODE with the given operands.
3910 ARG3_TYPE is the type of ARG3_RTX. Return the result rtx on success,
3911 otherwise return null. */
3912
3913 static rtx
3914 expand_cmpstrn_or_cmpmem (insn_code icode, rtx target, rtx arg1_rtx,
3915 rtx arg2_rtx, tree arg3_type, rtx arg3_rtx,
3916 HOST_WIDE_INT align)
3917 {
3918 machine_mode insn_mode = insn_data[icode].operand[0].mode;
3919
3920 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
3921 target = NULL_RTX;
3922
3923 struct expand_operand ops[5];
3924 create_output_operand (&ops[0], target, insn_mode);
3925 create_fixed_operand (&ops[1], arg1_rtx);
3926 create_fixed_operand (&ops[2], arg2_rtx);
3927 create_convert_operand_from (&ops[3], arg3_rtx, TYPE_MODE (arg3_type),
3928 TYPE_UNSIGNED (arg3_type));
3929 create_integer_operand (&ops[4], align);
3930 if (maybe_expand_insn (icode, 5, ops))
3931 return ops[0].value;
3932 return NULL_RTX;
3933 }
3934
3935 /* Expand expression EXP, which is a call to the memcmp built-in function.
3936 Return NULL_RTX if we failed and the caller should emit a normal call,
3937 otherwise try to get the result in TARGET, if convenient. */
3938
3939 static rtx
3940 expand_builtin_memcmp (tree exp, rtx target)
3941 {
3942 if (!validate_arglist (exp,
3943 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3944 return NULL_RTX;
3945
3946 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3947 implementing memcmp because it will stop if it encounters two
3948 zero bytes. */
3949 insn_code icode = direct_optab_handler (cmpmem_optab, SImode);
3950 if (icode == CODE_FOR_nothing)
3951 return NULL_RTX;
3952
3953 tree arg1 = CALL_EXPR_ARG (exp, 0);
3954 tree arg2 = CALL_EXPR_ARG (exp, 1);
3955 tree len = CALL_EXPR_ARG (exp, 2);
3956
3957 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3958 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3959
3960 /* If we cannot determine the alignment of either pointer, call the function. */
3961 if (arg1_align == 0 || arg2_align == 0)
3962 return NULL_RTX;
3963
3964 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
3965 location_t loc = EXPR_LOCATION (exp);
3966 rtx arg1_rtx = get_memory_rtx (arg1, len);
3967 rtx arg2_rtx = get_memory_rtx (arg2, len);
3968 rtx arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3969
3970 /* Set MEM_SIZE as appropriate. */
3971 if (CONST_INT_P (arg3_rtx))
3972 {
3973 set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
3974 set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
3975 }
3976
3977 rtx result = expand_cmpstrn_or_cmpmem (icode, target, arg1_rtx, arg2_rtx,
3978 TREE_TYPE (len), arg3_rtx,
3979 MIN (arg1_align, arg2_align));
3980 if (result)
3981 {
3982 /* Return the value in the proper mode for this function. */
3983 if (GET_MODE (result) == mode)
3984 return result;
3985
3986 if (target != 0)
3987 {
3988 convert_move (target, result, 0);
3989 return target;
3990 }
3991
3992 return convert_to_mode (mode, result, 0);
3993 }
3994
3995 result = target;
3996 if (! (result != 0
3997 && REG_P (result) && GET_MODE (result) == mode
3998 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3999 result = gen_reg_rtx (mode);
4000
4001 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
4002 TYPE_MODE (integer_type_node), 3,
4003 XEXP (arg1_rtx, 0), Pmode,
4004 XEXP (arg2_rtx, 0), Pmode,
4005 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4006 TYPE_UNSIGNED (sizetype)),
4007 TYPE_MODE (sizetype));
4008 return result;
4009 }
4010
4011 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4012 if we failed; the caller should then emit a normal call. Otherwise try to get
4013 the result in TARGET, if convenient. */
4014
4015 static rtx
4016 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4017 {
4018 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4019 return NULL_RTX;
4020
4021 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4022 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4023 if (cmpstr_icode != CODE_FOR_nothing || cmpstrn_icode != CODE_FOR_nothing)
4024 {
4025 rtx arg1_rtx, arg2_rtx;
4026 tree fndecl, fn;
4027 tree arg1 = CALL_EXPR_ARG (exp, 0);
4028 tree arg2 = CALL_EXPR_ARG (exp, 1);
4029 rtx result = NULL_RTX;
4030
4031 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4032 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4033
4034 /* If we cannot determine the alignment of either pointer, call the function. */
4035 if (arg1_align == 0 || arg2_align == 0)
4036 return NULL_RTX;
4037
4038 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4039 arg1 = builtin_save_expr (arg1);
4040 arg2 = builtin_save_expr (arg2);
4041
4042 arg1_rtx = get_memory_rtx (arg1, NULL);
4043 arg2_rtx = get_memory_rtx (arg2, NULL);
4044
4045 /* Try to call cmpstrsi. */
4046 if (cmpstr_icode != CODE_FOR_nothing)
4047 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4048 MIN (arg1_align, arg2_align));
4049
4050 /* Try to determine at least one length and call cmpstrnsi. */
4051 if (!result && cmpstrn_icode != CODE_FOR_nothing)
4052 {
4053 tree len;
4054 rtx arg3_rtx;
4055
4056 tree len1 = c_strlen (arg1, 1);
4057 tree len2 = c_strlen (arg2, 1);
4058
4059 if (len1)
4060 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4061 if (len2)
4062 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4063
4064 /* If we don't have a constant length for the first, use the length
4065 of the second, if we know it. We don't require a constant for
4066 this case; some cost analysis could be done if both are available
4067 but neither is constant. For now, assume they're equally cheap,
4068 unless one has side effects. If both strings have constant lengths,
4069 use the smaller. */
4070
4071 if (!len1)
4072 len = len2;
4073 else if (!len2)
4074 len = len1;
4075 else if (TREE_SIDE_EFFECTS (len1))
4076 len = len2;
4077 else if (TREE_SIDE_EFFECTS (len2))
4078 len = len1;
4079 else if (TREE_CODE (len1) != INTEGER_CST)
4080 len = len2;
4081 else if (TREE_CODE (len2) != INTEGER_CST)
4082 len = len1;
4083 else if (tree_int_cst_lt (len1, len2))
4084 len = len1;
4085 else
4086 len = len2;
4087
4088 /* If both arguments have side effects, we cannot optimize. */
4089 if (len && !TREE_SIDE_EFFECTS (len))
4090 {
4091 arg3_rtx = expand_normal (len);
4092 result = expand_cmpstrn_or_cmpmem
4093 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4094 arg3_rtx, MIN (arg1_align, arg2_align));
4095 }
4096 }
4097
4098 if (result)
4099 {
4100 /* Return the value in the proper mode for this function. */
4101 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4102 if (GET_MODE (result) == mode)
4103 return result;
4104 if (target == 0)
4105 return convert_to_mode (mode, result, 0);
4106 convert_move (target, result, 0);
4107 return target;
4108 }
4109
4110 /* Expand the library call ourselves using a stabilized argument
4111 list to avoid re-evaluating the function's arguments twice. */
4112 fndecl = get_callee_fndecl (exp);
4113 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4114 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4115 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4116 return expand_call (fn, target, target == const0_rtx);
4117 }
4118 return NULL_RTX;
4119 }
4120
4121 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4122 NULL_RTX if we failed; the caller should then emit a normal call.
4123 Otherwise try to get the result in TARGET, if convenient. */
4124
4125 static rtx
4126 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4127 ATTRIBUTE_UNUSED machine_mode mode)
4128 {
4129 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4130
4131 if (!validate_arglist (exp,
4132 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4133 return NULL_RTX;
4134
4135 /* If c_strlen can determine an expression for one of the string
4136 lengths, and it doesn't have side effects, then emit cmpstrnsi
4137 using length MIN(strlen(string)+1, arg3). */
4138 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4139 if (cmpstrn_icode != CODE_FOR_nothing)
4140 {
4141 tree len, len1, len2;
4142 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4143 rtx result;
4144 tree fndecl, fn;
4145 tree arg1 = CALL_EXPR_ARG (exp, 0);
4146 tree arg2 = CALL_EXPR_ARG (exp, 1);
4147 tree arg3 = CALL_EXPR_ARG (exp, 2);
4148
4149 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4150 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4151
4152 len1 = c_strlen (arg1, 1);
4153 len2 = c_strlen (arg2, 1);
4154
4155 if (len1)
4156 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4157 if (len2)
4158 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4159
4160 /* If we don't have a constant length for the first, use the length
4161 of the second, if we know it. We don't require a constant for
4162 this case; some cost analysis could be done if both are available
4163 but neither is constant. For now, assume they're equally cheap,
4164 unless one has side effects. If both strings have constant lengths,
4165 use the smaller. */
4166
4167 if (!len1)
4168 len = len2;
4169 else if (!len2)
4170 len = len1;
4171 else if (TREE_SIDE_EFFECTS (len1))
4172 len = len2;
4173 else if (TREE_SIDE_EFFECTS (len2))
4174 len = len1;
4175 else if (TREE_CODE (len1) != INTEGER_CST)
4176 len = len2;
4177 else if (TREE_CODE (len2) != INTEGER_CST)
4178 len = len1;
4179 else if (tree_int_cst_lt (len1, len2))
4180 len = len1;
4181 else
4182 len = len2;
4183
4184 /* If both arguments have side effects, we cannot optimize. */
4185 if (!len || TREE_SIDE_EFFECTS (len))
4186 return NULL_RTX;
4187
4188 /* The actual new length parameter is MIN(len,arg3). */
4189 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4190 fold_convert_loc (loc, TREE_TYPE (len), arg3));
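/* E.g. for strncmp (s, "abc", 10), LEN2 is 4 and the compare length
   becomes MIN (4, 10) == 4; this is safe because bytes past the first
   NUL cannot change the result of strncmp.  */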
4191
4192 /* If we cannot determine the alignment of either pointer, call the function. */
4193 if (arg1_align == 0 || arg2_align == 0)
4194 return NULL_RTX;
4195
4196 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4197 arg1 = builtin_save_expr (arg1);
4198 arg2 = builtin_save_expr (arg2);
4199 len = builtin_save_expr (len);
4200
4201 arg1_rtx = get_memory_rtx (arg1, len);
4202 arg2_rtx = get_memory_rtx (arg2, len);
4203 arg3_rtx = expand_normal (len);
4204 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
4205 arg2_rtx, TREE_TYPE (len), arg3_rtx,
4206 MIN (arg1_align, arg2_align));
4207 if (result)
4208 {
4209 /* Return the value in the proper mode for this function. */
4210 mode = TYPE_MODE (TREE_TYPE (exp));
4211 if (GET_MODE (result) == mode)
4212 return result;
4213 if (target == 0)
4214 return convert_to_mode (mode, result, 0);
4215 convert_move (target, result, 0);
4216 return target;
4217 }
4218
4219 /* Expand the library call ourselves using a stabilized argument
4220 list to avoid re-evaluating the function's arguments twice. */
4221 fndecl = get_callee_fndecl (exp);
4222 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4223 arg1, arg2, len);
4224 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4225 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4226 return expand_call (fn, target, target == const0_rtx);
4227 }
4228 return NULL_RTX;
4229 }
4230
4231 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4232 if that's convenient. */
4233
4234 rtx
4235 expand_builtin_saveregs (void)
4236 {
4237 rtx val;
4238 rtx_insn *seq;
4239
4240 /* Don't do __builtin_saveregs more than once in a function.
4241 Save the result of the first call and reuse it. */
4242 if (saveregs_value != 0)
4243 return saveregs_value;
4244
4245 /* When this function is called, it means that registers must be
4246 saved on entry to this function. So we migrate the call to the
4247 first insn of this function. */
4248
4249 start_sequence ();
4250
4251 /* Do whatever the machine needs done in this case. */
4252 val = targetm.calls.expand_builtin_saveregs ();
4253
4254 seq = get_insns ();
4255 end_sequence ();
4256
4257 saveregs_value = val;
4258
4259 /* Put the insns after the NOTE that starts the function. If this
4260 is inside a start_sequence, make the outer-level insn chain current, so
4261 the code is placed at the start of the function. */
4262 push_topmost_sequence ();
4263 emit_insn_after (seq, entry_of_function ());
4264 pop_topmost_sequence ();
4265
4266 return val;
4267 }
4268
4269 /* Expand a call to __builtin_next_arg. */
4270
4271 static rtx
4272 expand_builtin_next_arg (void)
4273 {
4274 /* Checking arguments is already done in fold_builtin_next_arg
4275 that must be called before this function. */
4276 return expand_binop (ptr_mode, add_optab,
4277 crtl->args.internal_arg_pointer,
4278 crtl->args.arg_offset_rtx,
4279 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4280 }
4281
4282 /* Make it easier for the backends by protecting the valist argument
4283 from multiple evaluations. */
4284
4285 static tree
4286 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4287 {
4288 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4289
4290 /* The current way of determining the type of valist is completely
4291 bogus. We should have the information on the va builtin instead. */
4292 if (!vatype)
4293 vatype = targetm.fn_abi_va_list (cfun->decl);
4294
4295 if (TREE_CODE (vatype) == ARRAY_TYPE)
4296 {
4297 if (TREE_SIDE_EFFECTS (valist))
4298 valist = save_expr (valist);
4299
4300 /* For this case, the backends will be expecting a pointer to
4301 vatype, but it's possible we've actually been given an array
4302 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4303 So fix it. */
4304 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4305 {
4306 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4307 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4308 }
4309 }
4310 else
4311 {
4312 tree pt = build_pointer_type (vatype);
4313
4314 if (! needs_lvalue)
4315 {
4316 if (! TREE_SIDE_EFFECTS (valist))
4317 return valist;
4318
4319 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4320 TREE_SIDE_EFFECTS (valist) = 1;
4321 }
4322
4323 if (TREE_SIDE_EFFECTS (valist))
4324 valist = save_expr (valist);
4325 valist = fold_build2_loc (loc, MEM_REF,
4326 vatype, valist, build_int_cst (pt, 0));
4327 }
4328
4329 return valist;
4330 }
4331
4332 /* The "standard" definition of va_list is void*. */
4333
4334 tree
4335 std_build_builtin_va_list (void)
4336 {
4337 return ptr_type_node;
4338 }
4339
4340 /* The "standard" abi va_list is va_list_type_node. */
4341
4342 tree
4343 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4344 {
4345 return va_list_type_node;
4346 }
4347
4348 /* The "standard" type of va_list is va_list_type_node. */
4349
4350 tree
4351 std_canonical_va_list_type (tree type)
4352 {
4353 tree wtype, htype;
4354
4355 if (INDIRECT_REF_P (type))
4356 type = TREE_TYPE (type);
4357 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
4358 type = TREE_TYPE (type);
4359 wtype = va_list_type_node;
4360 htype = type;
4361 /* Handle structure va_list types. */
4362 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4363 htype = TREE_TYPE (htype);
4364 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4365 {
4366 /* If va_list is an array type, the argument may have decayed
4367 to a pointer type, e.g. by being passed to another function.
4368 In that case, unwrap both types so that we can compare the
4369 underlying records. */
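/* On the x86-64 psABI, for instance, va_list is declared as
   `typedef struct __va_list_tag va_list[1];', so a va_list argument
   arrives here decayed to `struct __va_list_tag *' and both WTYPE and
   HTYPE unwrap to the same underlying RECORD_TYPE.  */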
4370 if (TREE_CODE (htype) == ARRAY_TYPE
4371 || POINTER_TYPE_P (htype))
4372 {
4373 wtype = TREE_TYPE (wtype);
4374 htype = TREE_TYPE (htype);
4375 }
4376 }
4377 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4378 return va_list_type_node;
4379
4380 return NULL_TREE;
4381 }
4382
4383 /* The "standard" implementation of va_start: just assign `nextarg' to
4384 the variable. */
4385
4386 void
4387 std_expand_builtin_va_start (tree valist, rtx nextarg)
4388 {
4389 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4390 convert_move (va_r, nextarg, 0);
4391
4392 /* We do not have any valid bounds for the pointer, so
4393 just store zero bounds for it. */
4394 if (chkp_function_instrumented_p (current_function_decl))
4395 chkp_expand_bounds_reset_for_mem (valist,
4396 make_tree (TREE_TYPE (valist),
4397 nextarg));
4398 }
4399
4400 /* Expand EXP, a call to __builtin_va_start. */
4401
4402 static rtx
4403 expand_builtin_va_start (tree exp)
4404 {
4405 rtx nextarg;
4406 tree valist;
4407 location_t loc = EXPR_LOCATION (exp);
4408
4409 if (call_expr_nargs (exp) < 2)
4410 {
4411 error_at (loc, "too few arguments to function %<va_start%>");
4412 return const0_rtx;
4413 }
4414
4415 if (fold_builtin_next_arg (exp, true))
4416 return const0_rtx;
4417
4418 nextarg = expand_builtin_next_arg ();
4419 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4420
4421 if (targetm.expand_builtin_va_start)
4422 targetm.expand_builtin_va_start (valist, nextarg);
4423 else
4424 std_expand_builtin_va_start (valist, nextarg);
4425
4426 return const0_rtx;
4427 }
4428
4429 /* Expand EXP, a call to __builtin_va_end. */
4430
4431 static rtx
4432 expand_builtin_va_end (tree exp)
4433 {
4434 tree valist = CALL_EXPR_ARG (exp, 0);
4435
4436 /* Evaluate for side effects, if needed. I hate macros that don't
4437 do that. */
4438 if (TREE_SIDE_EFFECTS (valist))
4439 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4440
4441 return const0_rtx;
4442 }
4443
4444 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4445 builtin rather than just as an assignment in stdarg.h because of the
4446 nastiness of array-type va_list types. */
4447
4448 static rtx
4449 expand_builtin_va_copy (tree exp)
4450 {
4451 tree dst, src, t;
4452 location_t loc = EXPR_LOCATION (exp);
4453
4454 dst = CALL_EXPR_ARG (exp, 0);
4455 src = CALL_EXPR_ARG (exp, 1);
4456
4457 dst = stabilize_va_list_loc (loc, dst, 1);
4458 src = stabilize_va_list_loc (loc, src, 0);
4459
4460 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4461
4462 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4463 {
4464 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4465 TREE_SIDE_EFFECTS (t) = 1;
4466 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4467 }
4468 else
4469 {
4470 rtx dstb, srcb, size;
4471
4472 /* Evaluate to pointers. */
4473 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4474 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4475 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4476 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4477
4478 dstb = convert_memory_address (Pmode, dstb);
4479 srcb = convert_memory_address (Pmode, srcb);
4480
4481 /* "Dereference" to BLKmode memories. */
4482 dstb = gen_rtx_MEM (BLKmode, dstb);
4483 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4484 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4485 srcb = gen_rtx_MEM (BLKmode, srcb);
4486 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4487 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4488
4489 /* Copy. */
4490 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4491 }
4492
4493 return const0_rtx;
4494 }
4495
4496 /* Expand a call to one of the builtin functions __builtin_frame_address or
4497 __builtin_return_address. */
4498
4499 static rtx
4500 expand_builtin_frame_address (tree fndecl, tree exp)
4501 {
4502 /* The argument must be a nonnegative integer constant.
4503 It counts the number of frames to scan up the stack.
4504 The value is either the frame pointer value or the return
4505 address saved in that frame. */
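/* For example, __builtin_return_address (0) yields the current
   function's return address, while any nonzero COUNT asks about an
   enclosing frame and triggers the -Wframe-address warning below.  */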
4506 if (call_expr_nargs (exp) == 0)
4507 /* Warning about missing arg was already issued. */
4508 return const0_rtx;
4509 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4510 {
4511 error ("invalid argument to %qD", fndecl);
4512 return const0_rtx;
4513 }
4514 else
4515 {
4516 /* Number of frames to scan up the stack. */
4517 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
4518
4519 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
4520
4521 /* Some ports cannot access arbitrary stack frames. */
4522 if (tem == NULL)
4523 {
4524 warning (0, "unsupported argument to %qD", fndecl);
4525 return const0_rtx;
4526 }
4527
4528 if (count)
4529 {
4530 /* Warn since no effort is made to ensure that any frame
4531 beyond the current one exists or can be safely reached. */
4532 warning (OPT_Wframe_address, "calling %qD with "
4533 "a nonzero argument is unsafe", fndecl);
4534 }
4535
4536 /* For __builtin_frame_address, return what we've got. */
4537 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4538 return tem;
4539
4540 if (!REG_P (tem)
4541 && ! CONSTANT_P (tem))
4542 tem = copy_addr_to_reg (tem);
4543 return tem;
4544 }
4545 }
4546
4547 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4548 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4549 is the same as for allocate_dynamic_stack_space. */
4550
4551 static rtx
4552 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4553 {
4554 rtx op0;
4555 rtx result;
4556 bool valid_arglist;
4557 unsigned int align;
4558 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4559 == BUILT_IN_ALLOCA_WITH_ALIGN);
4560
4561 valid_arglist
4562 = (alloca_with_align
4563 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4564 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4565
4566 if (!valid_arglist)
4567 return NULL_RTX;
4568
4569 /* Compute the argument. */
4570 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4571
4572 /* Compute the alignment. */
4573 align = (alloca_with_align
4574 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4575 : BIGGEST_ALIGNMENT);
4576
4577 /* Allocate the desired space. */
4578 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4579 result = convert_memory_address (ptr_mode, result);
4580
4581 return result;
4582 }
4583
4584 /* Expand a call to bswap builtin in EXP.
4585 Return NULL_RTX if a normal call should be emitted rather than expanding the
4586 function in-line. If convenient, the result should be placed in TARGET.
4587 SUBTARGET may be used as the target for computing one of EXP's operands. */
4588
4589 static rtx
4590 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
4591 rtx subtarget)
4592 {
4593 tree arg;
4594 rtx op0;
4595
4596 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4597 return NULL_RTX;
4598
4599 arg = CALL_EXPR_ARG (exp, 0);
4600 op0 = expand_expr (arg,
4601 subtarget && GET_MODE (subtarget) == target_mode
4602 ? subtarget : NULL_RTX,
4603 target_mode, EXPAND_NORMAL);
4604 if (GET_MODE (op0) != target_mode)
4605 op0 = convert_to_mode (target_mode, op0, 1);
4606
4607 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4608
4609 gcc_assert (target);
4610
4611 return convert_to_mode (target_mode, target, 1);
4612 }
4613
4614 /* Expand a call to a unary builtin in EXP.
4615 Return NULL_RTX if a normal call should be emitted rather than expanding the
4616 function in-line. If convenient, the result should be placed in TARGET.
4617 SUBTARGET may be used as the target for computing one of EXP's operands. */
4618
4619 static rtx
4620 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
4621 rtx subtarget, optab op_optab)
4622 {
4623 rtx op0;
4624
4625 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4626 return NULL_RTX;
4627
4628 /* Compute the argument. */
4629 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4630 (subtarget
4631 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4632 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4633 VOIDmode, EXPAND_NORMAL);
4634 /* Compute op, into TARGET if possible.
4635 Set TARGET to wherever the result comes back. */
4636 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4637 op_optab, op0, target, op_optab != clrsb_optab);
4638 gcc_assert (target);
4639
4640 return convert_to_mode (target_mode, target, 0);
4641 }
4642
4643 /* Expand a call to __builtin_expect. We just return our argument
4644 as the builtin_expect semantics should already have been acted on
4645 by the tree branch prediction pass. */
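/* E.g. in `if (__builtin_expect (ptr != NULL, 1))' the likelihood hint
   has already been folded into edge probabilities, so expanding the
   call reduces to expanding its first argument.  */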
4646
4647 static rtx
4648 expand_builtin_expect (tree exp, rtx target)
4649 {
4650 tree arg;
4651
4652 if (call_expr_nargs (exp) < 2)
4653 return const0_rtx;
4654 arg = CALL_EXPR_ARG (exp, 0);
4655
4656 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4657 /* When guessing was done, the hints should be already stripped away. */
4658 gcc_assert (!flag_guess_branch_prob
4659 || optimize == 0 || seen_error ());
4660 return target;
4661 }
4662
4663 /* Expand a call to __builtin_assume_aligned. We just return our first
4664 argument, as the builtin_assume_aligned semantics should already
4665 have been acted on by CCP. */
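/* E.g. after `q = __builtin_assume_aligned (p, 16);' CCP has already
   recorded the 16-byte alignment on the SSA name, so only the first
   argument remains to expand; the asserts below merely check that the
   ignored alignment arguments have no side effects.  */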
4666
4667 static rtx
4668 expand_builtin_assume_aligned (tree exp, rtx target)
4669 {
4670 if (call_expr_nargs (exp) < 2)
4671 return const0_rtx;
4672 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4673 EXPAND_NORMAL);
4674 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4675 && (call_expr_nargs (exp) < 3
4676 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4677 return target;
4678 }
4679
4680 void
4681 expand_builtin_trap (void)
4682 {
4683 if (targetm.have_trap ())
4684 {
4685 rtx_insn *insn = emit_insn (targetm.gen_trap ());
4686 /* For trap insns when not accumulating outgoing args force
4687 REG_ARGS_SIZE note to prevent crossjumping of calls with
4688 different args sizes. */
4689 if (!ACCUMULATE_OUTGOING_ARGS)
4690 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4691 }
4692 else
4693 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4694 emit_barrier ();
4695 }
4696
4697 /* Expand a call to __builtin_unreachable. We do nothing except emit
4698 a barrier saying that control flow will not pass here.
4699
4700 It is the responsibility of the program being compiled to ensure
4701 that control flow never reaches __builtin_unreachable. */
4702 static void
4703 expand_builtin_unreachable (void)
4704 {
4705 emit_barrier ();
4706 }
4707
4708 /* Expand EXP, a call to fabs, fabsf or fabsl.
4709 Return NULL_RTX if a normal call should be emitted rather than expanding
4710 the function inline. If convenient, the result should be placed
4711 in TARGET. SUBTARGET may be used as the target for computing
4712 the operand. */
4713
4714 static rtx
4715 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4716 {
4717 machine_mode mode;
4718 tree arg;
4719 rtx op0;
4720
4721 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4722 return NULL_RTX;
4723
4724 arg = CALL_EXPR_ARG (exp, 0);
4725 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4726 mode = TYPE_MODE (TREE_TYPE (arg));
4727 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4728 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4729 }
4730
4731 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4732 Return NULL_RTX if a normal call should be emitted rather than expanding the
4733 function inline. If convenient, the result should be placed in TARGET.
4734 SUBTARGET may be used as the target for computing the operand. */
4735
4736 static rtx
4737 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4738 {
4739 rtx op0, op1;
4740 tree arg;
4741
4742 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4743 return NULL_RTX;
4744
4745 arg = CALL_EXPR_ARG (exp, 0);
4746 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4747
4748 arg = CALL_EXPR_ARG (exp, 1);
4749 op1 = expand_normal (arg);
4750
4751 return expand_copysign (op0, op1, target);
4752 }
4753
4754 /* Expand a call to __builtin___clear_cache. */
4755
4756 static rtx
4757 expand_builtin___clear_cache (tree exp)
4758 {
4759 if (!targetm.code_for_clear_cache)
4760 {
4761 #ifdef CLEAR_INSN_CACHE
4762 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4763 does something. Just do the default expansion to a call to
4764 __clear_cache(). */
4765 return NULL_RTX;
4766 #else
4767 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4768 does nothing. There is no need to call it. Do nothing. */
4769 return const0_rtx;
4770 #endif /* CLEAR_INSN_CACHE */
4771 }
4772
4773 /* We have a "clear_cache" insn, and it will handle everything. */
4774 tree begin, end;
4775 rtx begin_rtx, end_rtx;
4776
4777 /* We must not expand to a library call. If we did, any
4778 fallback library function in libgcc that might contain a call to
4779 __builtin___clear_cache() would recurse infinitely. */
4780 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4781 {
4782 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4783 return const0_rtx;
4784 }
4785
4786 if (targetm.have_clear_cache ())
4787 {
4788 struct expand_operand ops[2];
4789
4790 begin = CALL_EXPR_ARG (exp, 0);
4791 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4792
4793 end = CALL_EXPR_ARG (exp, 1);
4794 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4795
4796 create_address_operand (&ops[0], begin_rtx);
4797 create_address_operand (&ops[1], end_rtx);
4798 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
4799 return const0_rtx;
4800 }
4801 return const0_rtx;
4802 }
4803
4804 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4805
4806 static rtx
4807 round_trampoline_addr (rtx tramp)
4808 {
4809 rtx temp, addend, mask;
4810
4811 /* If we don't need too much alignment, we'll have been guaranteed
4812 proper alignment by get_trampoline_type. */
4813 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4814 return tramp;
4815
4816 /* Round address up to desired boundary. */
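/* E.g. with TRAMPOLINE_ALIGNMENT == 64, ADDEND is 7 and MASK is -8,
   so a TRAMP of 0x1005 rounds to (0x1005 + 7) & -8 == 0x1008.  */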
4817 temp = gen_reg_rtx (Pmode);
4818 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4819 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4820
4821 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4822 temp, 0, OPTAB_LIB_WIDEN);
4823 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4824 temp, 0, OPTAB_LIB_WIDEN);
4825
4826 return tramp;
4827 }
4828
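/* Expand a call to a trampoline-initialization builtin; ONSTACK is
   true for the on-stack variant. Fills in the trampoline at the
   address given by the first argument and returns const0_rtx.  */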
4829 static rtx
4830 expand_builtin_init_trampoline (tree exp, bool onstack)
4831 {
4832 tree t_tramp, t_func, t_chain;
4833 rtx m_tramp, r_tramp, r_chain, tmp;
4834
4835 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4836 POINTER_TYPE, VOID_TYPE))
4837 return NULL_RTX;
4838
4839 t_tramp = CALL_EXPR_ARG (exp, 0);
4840 t_func = CALL_EXPR_ARG (exp, 1);
4841 t_chain = CALL_EXPR_ARG (exp, 2);
4842
4843 r_tramp = expand_normal (t_tramp);
4844 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4845 MEM_NOTRAP_P (m_tramp) = 1;
4846
4847 /* If ONSTACK, the TRAMP argument should be the address of a field
4848 within the local function's FRAME decl. Either way, let's see if
4849 we can fill in the MEM_ATTRs for this memory. */
4850 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4851 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
4852
4853 /* Creator of a heap trampoline is responsible for making sure the
4854 address is aligned to at least STACK_BOUNDARY. Normally malloc
4855 will ensure this anyhow. */
4856 tmp = round_trampoline_addr (r_tramp);
4857 if (tmp != r_tramp)
4858 {
4859 m_tramp = change_address (m_tramp, BLKmode, tmp);
4860 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4861 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4862 }
4863
4864 /* The FUNC argument should be the address of the nested function.
4865 Extract the actual function decl to pass to the hook. */
4866 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4867 t_func = TREE_OPERAND (t_func, 0);
4868 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4869
4870 r_chain = expand_normal (t_chain);
4871
4872 /* Generate insns to initialize the trampoline. */
4873 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4874
4875 if (onstack)
4876 {
4877 trampolines_created = 1;
4878
4879 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4880 "trampoline generated for nested function %qD", t_func);
4881 }
4882
4883 return const0_rtx;
4884 }
4885
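/* Expand a call to the adjust_trampoline builtin: round the trampoline
   address up to TRAMPOLINE_ALIGNMENT and give the target a chance to
   adjust it further.  */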
4886 static rtx
4887 expand_builtin_adjust_trampoline (tree exp)
4888 {
4889 rtx tramp;
4890
4891 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4892 return NULL_RTX;
4893
4894 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4895 tramp = round_trampoline_addr (tramp);
4896 if (targetm.calls.trampoline_adjust_address)
4897 tramp = targetm.calls.trampoline_adjust_address (tramp);
4898
4899 return tramp;
4900 }
4901
4902 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4903 function. The function first checks whether the back end provides
4904 an insn to implement signbit for the respective mode. If not, it
4905 checks whether the floating point format of the value is such that
4906 the sign bit can be extracted. If that is not the case, error out.
4907 EXP is the expression that is a call to the builtin function; if
4908 convenient, the result should be placed in TARGET. */
4909 static rtx
4910 expand_builtin_signbit (tree exp, rtx target)
4911 {
4912 const struct real_format *fmt;
4913 machine_mode fmode, imode, rmode;
4914 tree arg;
4915 int word, bitpos;
4916 enum insn_code icode;
4917 rtx temp;
4918 location_t loc = EXPR_LOCATION (exp);
4919
4920 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4921 return NULL_RTX;
4922
4923 arg = CALL_EXPR_ARG (exp, 0);
4924 fmode = TYPE_MODE (TREE_TYPE (arg));
4925 rmode = TYPE_MODE (TREE_TYPE (exp));
4926 fmt = REAL_MODE_FORMAT (fmode);
4927
4928 arg = builtin_save_expr (arg);
4929
4930 /* Expand the argument yielding a RTX expression. */
4931 temp = expand_normal (arg);
4932
4933 /* Check if the back end provides an insn that handles signbit for the
4934 argument's mode. */
4935 icode = optab_handler (signbit_optab, fmode);
4936 if (icode != CODE_FOR_nothing)
4937 {
4938 rtx_insn *last = get_last_insn ();
4939 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4940 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4941 return target;
4942 delete_insns_since (last);
4943 }
4944
4945 /* For floating point formats without a sign bit, implement signbit
4946 as "ARG < 0.0". */
4947 bitpos = fmt->signbit_ro;
4948 if (bitpos < 0)
4949 {
4950 /* But we can't do this if the format supports signed zero. */
4951 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
4952
4953 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
4954 build_real (TREE_TYPE (arg), dconst0));
4955 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4956 }
4957
4958 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
4959 {
4960 imode = int_mode_for_mode (fmode);
4961 gcc_assert (imode != BLKmode);
4962 temp = gen_lowpart (imode, temp);
4963 }
4964 else
4965 {
4966 imode = word_mode;
4967 /* Handle targets with different FP word orders. */
4968 if (FLOAT_WORDS_BIG_ENDIAN)
4969 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
4970 else
4971 word = bitpos / BITS_PER_WORD;
4972 temp = operand_subword_force (temp, word, fmode);
4973 bitpos = bitpos % BITS_PER_WORD;
4974 }
4975
4976 /* Force the intermediate word_mode (or narrower) result into a
4977 register. This avoids attempting to create paradoxical SUBREGs
4978 of floating point modes below. */
4979 temp = force_reg (imode, temp);
4980
4981 /* If the bitpos is within the "result mode" lowpart, the operation
4982 can be implemented with a single bitwise AND. Otherwise, we need
4983 a right shift and an AND. */
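/* E.g. for IEEE double on a 32-bit little-endian target the sign bit
   is bit 63 of the value, so WORD is 1, BITPOS becomes 31 and, with a
   32-bit RMODE, a single AND against 0x80000000 extracts it.  */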
4984
4985 if (bitpos < GET_MODE_BITSIZE (rmode))
4986 {
4987 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
4988
4989 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
4990 temp = gen_lowpart (rmode, temp);
4991 temp = expand_binop (rmode, and_optab, temp,
4992 immed_wide_int_const (mask, rmode),
4993 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4994 }
4995 else
4996 {
4997 /* Perform a logical right shift to place the signbit in the least
4998 significant bit, then truncate the result to the desired mode
4999 and mask just this bit. */
5000 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5001 temp = gen_lowpart (rmode, temp);
5002 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5003 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5004 }
5005
5006 return temp;
5007 }
5008
5009 /* Expand fork or exec calls. TARGET is the desired target of the
5010 call. EXP is the call. FN is the
5011 identificator of the actual function. IGNORE is nonzero if the
5012 value is to be ignored. */
5013
5014 static rtx
5015 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5016 {
5017 tree id, decl;
5018 tree call;
5019
5020 /* If we are not profiling, just call the function. */
5021 if (!profile_arc_flag)
5022 return NULL_RTX;
5023
5024 /* Otherwise call the wrapper. This should be equivalent for the rest of
5025 the compiler, so the code does not diverge, and the wrapper may run the
5026 code necessary for keeping the profiling sane. */
5027
5028 switch (DECL_FUNCTION_CODE (fn))
5029 {
5030 case BUILT_IN_FORK:
5031 id = get_identifier ("__gcov_fork");
5032 break;
5033
5034 case BUILT_IN_EXECL:
5035 id = get_identifier ("__gcov_execl");
5036 break;
5037
5038 case BUILT_IN_EXECV:
5039 id = get_identifier ("__gcov_execv");
5040 break;
5041
5042 case BUILT_IN_EXECLP:
5043 id = get_identifier ("__gcov_execlp");
5044 break;
5045
5046 case BUILT_IN_EXECLE:
5047 id = get_identifier ("__gcov_execle");
5048 break;
5049
5050 case BUILT_IN_EXECVP:
5051 id = get_identifier ("__gcov_execvp");
5052 break;
5053
5054 case BUILT_IN_EXECVE:
5055 id = get_identifier ("__gcov_execve");
5056 break;
5057
5058 default:
5059 gcc_unreachable ();
5060 }
5061
5062 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5063 FUNCTION_DECL, id, TREE_TYPE (fn));
5064 DECL_EXTERNAL (decl) = 1;
5065 TREE_PUBLIC (decl) = 1;
5066 DECL_ARTIFICIAL (decl) = 1;
5067 TREE_NOTHROW (decl) = 1;
5068 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5069 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5070 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5071 return expand_call (call, target, ignore);
5072 }
5073
5074
5075 \f
5076 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5077 the pointer in these functions is void*, the tree optimizers may remove
5078 casts. The mode computed in expand_builtin isn't reliable either, due
5079 to __sync_bool_compare_and_swap.
5080
5081 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5082 group of builtins. This gives us log2 of the mode size. */
5083
5084 static inline machine_mode
5085 get_builtin_sync_mode (int fcode_diff)
5086 {
5087 /* The size is not negotiable, so ask not to get BLKmode in return
5088 if the target indicates that a smaller size would be better. */
5089 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5090 }
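/* For example, __sync_fetch_and_add_4 gives
   fcode_diff == BUILT_IN_SYNC_FETCH_AND_ADD_4
                 - BUILT_IN_SYNC_FETCH_AND_ADD_1 == 2,
   so the result is mode_for_size (8 << 2, MODE_INT, 0), i.e. the
   32-bit integer mode (SImode on typical targets).  */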
5091
5092 /* Expand the memory expression LOC and return the appropriate memory operand
5093 for the builtin_sync operations. */
5094
5095 static rtx
5096 get_builtin_sync_mem (tree loc, machine_mode mode)
5097 {
5098 rtx addr, mem;
5099
5100 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5101 addr = convert_memory_address (Pmode, addr);
5102
5103 /* Note that we explicitly do not want any alias information for this
5104 memory, so that we kill all other live memories. Otherwise we don't
5105 satisfy the full barrier semantics of the intrinsic. */
5106 mem = validize_mem (gen_rtx_MEM (mode, addr));
5107
5108 /* The alignment needs to be at least that of the mode. */
5109 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5110 get_pointer_alignment (loc)));
5111 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5112 MEM_VOLATILE_P (mem) = 1;
5113
5114 return mem;
5115 }
5116
5117 /* Make sure an argument is in the right mode.
5118 EXP is the tree argument.
5119 MODE is the mode it should be in. */
5120
5121 static rtx
5122 expand_expr_force_mode (tree exp, machine_mode mode)
5123 {
5124 rtx val;
5125 machine_mode old_mode;
5126
5127 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5128 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5129 of CONST_INTs, where we know the old_mode only from the call argument. */
5130
5131 old_mode = GET_MODE (val);
5132 if (old_mode == VOIDmode)
5133 old_mode = TYPE_MODE (TREE_TYPE (exp));
5134 val = convert_modes (mode, old_mode, val, 1);
5135 return val;
5136 }
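/* For example, in

       char c;
       __sync_fetch_and_add (&c, 1);

   the constant 1 expands to a VOIDmode CONST_INT, and the char operand
   may have been promoted to a wider mode; both are narrowed back here
   to the QImode the sync optab expects.  */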
5137
5138
5139 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5140 EXP is the CALL_EXPR. CODE is the rtx code
5141 that corresponds to the arithmetic or logical operation from the name;
5142 an exception here is that NOT actually means NAND. TARGET is an optional
5143 place for us to store the results; AFTER is true if this is the
5144 fetch_and_xxx form. */
5145
5146 static rtx
5147 expand_builtin_sync_operation (machine_mode mode, tree exp,
5148 enum rtx_code code, bool after,
5149 rtx target)
5150 {
5151 rtx val, mem;
5152 location_t loc = EXPR_LOCATION (exp);
5153
5154 if (code == NOT && warn_sync_nand)
5155 {
5156 tree fndecl = get_callee_fndecl (exp);
5157 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5158
5159 static bool warned_f_a_n, warned_n_a_f;
5160
5161 switch (fcode)
5162 {
5163 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5164 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5165 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5166 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5167 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5168 if (warned_f_a_n)
5169 break;
5170
5171 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5172 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5173 warned_f_a_n = true;
5174 break;
5175
5176 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5177 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5178 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5179 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5180 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5181 if (warned_n_a_f)
5182 break;
5183
5184 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5185 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5186 warned_n_a_f = true;
5187 break;
5188
5189 default:
5190 gcc_unreachable ();
5191 }
5192 }
5193
5194 /* Expand the operands. */
5195 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5196 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5197
5198 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
5199 after);
5200 }
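/* For example, with the GCC 4.4 NAND semantics mentioned above:

       old = __sync_fetch_and_nand (&v, m);   AFTER == false, returns the
                                              value before the operation
       new = __sync_nand_and_fetch (&v, m);   AFTER == true, returns the
                                              new value ~(old & m)

   Before GCC 4.4 the NAND builtins computed ~old & m, hence the
   one-time inform calls above.  */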
5201
5202 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5203 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5204 true if this is the boolean form. TARGET is a place for us to store the
5205 results; this is NOT optional if IS_BOOL is true. */
5206
5207 static rtx
5208 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5209 bool is_bool, rtx target)
5210 {
5211 rtx old_val, new_val, mem;
5212 rtx *pbool, *poval;
5213
5214 /* Expand the operands. */
5215 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5216 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5217 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5218
5219 pbool = poval = NULL;
5220 if (target != const0_rtx)
5221 {
5222 if (is_bool)
5223 pbool = &target;
5224 else
5225 poval = &target;
5226 }
5227 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5228 false, MEMMODEL_SYNC_SEQ_CST,
5229 MEMMODEL_SYNC_SEQ_CST))
5230 return NULL_RTX;
5231
5232 return target;
5233 }
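/* Both user-level forms funnel through here; for example

       ok  = __sync_bool_compare_and_swap (&v, oldv, newv);   IS_BOOL
       cur = __sync_val_compare_and_swap (&v, oldv, newv);    !IS_BOOL

   Only the requested result is asked of expand_atomic_compare_and_swap;
   the unused pointer stays NULL.  */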
5234
5235 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5236 general form is actually an atomic exchange, and some targets only
5237 support a reduced form with the second argument being a constant 1.
5238 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5239 the results. */
5240
5241 static rtx
5242 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5243 rtx target)
5244 {
5245 rtx val, mem;
5246
5247 /* Expand the operands. */
5248 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5249 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5250
5251 return expand_sync_lock_test_and_set (target, mem, val);
5252 }
5253
5254 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5255
5256 static void
5257 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5258 {
5259 rtx mem;
5260
5261 /* Expand the operands. */
5262 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5263
5264 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
5265 }
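/* A minimal sketch of the intended pairing, a simple spinlock:

       while (__sync_lock_test_and_set (&lock, 1))   acquire semantics
         ;
       ... critical section ...
       __sync_lock_release (&lock);                  release semantics

   which is why the release is expanded above as an atomic store of
   zero with MEMMODEL_SYNC_RELEASE.  */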
5266
5267 /* Given an integer representing an ``enum memmodel'', verify its
5268 correctness and return the memory model enum. */
5269
5270 static enum memmodel
5271 get_memmodel (tree exp)
5272 {
5273 rtx op;
5274 unsigned HOST_WIDE_INT val;
5275
5276 /* If the parameter is not a constant, it's a run time value so we'll just
5277 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5278 if (TREE_CODE (exp) != INTEGER_CST)
5279 return MEMMODEL_SEQ_CST;
5280
5281 op = expand_normal (exp);
5282
5283 val = INTVAL (op);
5284 if (targetm.memmodel_check)
5285 val = targetm.memmodel_check (val);
5286 else if (val & ~MEMMODEL_MASK)
5287 {
5288 warning (OPT_Winvalid_memory_model,
5289 "unknown architecture specifier in memory model to builtin");
5290 return MEMMODEL_SEQ_CST;
5291 }
5292
5293 /* Should never see a user explicit SYNC memory model, so >= LAST works. */
5294 if (memmodel_base (val) >= MEMMODEL_LAST)
5295 {
5296 warning (OPT_Winvalid_memory_model,
5297 "invalid memory model argument to builtin");
5298 return MEMMODEL_SEQ_CST;
5299 }
5300
5301 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5302 be conservative and promote consume to acquire. */
5303 if (val == MEMMODEL_CONSUME)
5304 val = MEMMODEL_ACQUIRE;
5305
5306 return (enum memmodel) val;
5307 }
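/* For example, with the standard mapping __ATOMIC_RELAXED == 0 up to
   __ATOMIC_SEQ_CST == 5, the call

       __atomic_load_n (&v, __ATOMIC_CONSUME);

   arrives here with val == MEMMODEL_CONSUME and leaves as
   MEMMODEL_ACQUIRE because of the PR 59448 workaround above.  */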
5308
5309 /* Expand the __atomic_exchange intrinsic:
5310 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5311 EXP is the CALL_EXPR.
5312 TARGET is an optional place for us to store the results. */
5313
5314 static rtx
5315 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5316 {
5317 rtx val, mem;
5318 enum memmodel model;
5319
5320 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5321
5322 if (!flag_inline_atomics)
5323 return NULL_RTX;
5324
5325 /* Expand the operands. */
5326 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5327 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5328
5329 return expand_atomic_exchange (target, mem, val, model);
5330 }
5331
5332 /* Expand the __atomic_compare_exchange intrinsic:
5333 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5334 TYPE desired, BOOL weak,
5335 enum memmodel success,
5336 enum memmodel failure)
5337 EXP is the CALL_EXPR.
5338 TARGET is an optional place for us to store the results. */
5339
5340 static rtx
5341 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5342 rtx target)
5343 {
5344 rtx expect, desired, mem, oldval;
5345 rtx_code_label *label;
5346 enum memmodel success, failure;
5347 tree weak;
5348 bool is_weak;
5349
5350 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5351 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5352
5353 if (failure > success)
5354 {
5355 warning (OPT_Winvalid_memory_model,
5356 "failure memory model cannot be stronger than success memory "
5357 "model for %<__atomic_compare_exchange%>");
5358 success = MEMMODEL_SEQ_CST;
5359 }
5360
5361 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5362 {
5363 warning (OPT_Winvalid_memory_model,
5364 "invalid failure memory model for "
5365 "%<__atomic_compare_exchange%>");
5366 failure = MEMMODEL_SEQ_CST;
5367 success = MEMMODEL_SEQ_CST;
5368 }
5369
5370
5371 if (!flag_inline_atomics)
5372 return NULL_RTX;
5373
5374 /* Expand the operands. */
5375 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5376
5377 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5378 expect = convert_memory_address (Pmode, expect);
5379 expect = gen_rtx_MEM (mode, expect);
5380 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5381
5382 weak = CALL_EXPR_ARG (exp, 3);
5383 is_weak = false;
5384 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5385 is_weak = true;
5386
5387 if (target == const0_rtx)
5388 target = NULL;
5389
5390 /* Lest the rtl backend create a race condition with an improper store
5391 to memory, always create a new pseudo for OLDVAL. */
5392 oldval = NULL;
5393
5394 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5395 is_weak, success, failure))
5396 return NULL_RTX;
5397
5398 /* Conditionally store back to EXPECT, lest we create a race condition
5399 with an improper store to memory. */
5400 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5401 the normal case where EXPECT is totally private, i.e. a register. At
5402 which point the store can be unconditional. */
5403 label = gen_label_rtx ();
5404 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
5405 GET_MODE (target), 1, label);
5406 emit_move_insn (expect, oldval);
5407 emit_label (label);
5408
5409 return target;
5410 }
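/* The conditional store-back implements the language-level contract
   that, e.g.,

       ok = __atomic_compare_exchange_n (&v, &expected, desired, 0,
                                         __ATOMIC_SEQ_CST,
                                         __ATOMIC_SEQ_CST);

   updates EXPECTED with the current value of V when the exchange
   fails, and leaves it untouched on success.  */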
5411
5412 /* Expand the __atomic_load intrinsic:
5413 TYPE __atomic_load (TYPE *object, enum memmodel)
5414 EXP is the CALL_EXPR.
5415 TARGET is an optional place for us to store the results. */
5416
5417 static rtx
5418 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
5419 {
5420 rtx mem;
5421 enum memmodel model;
5422
5423 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5424 if (is_mm_release (model) || is_mm_acq_rel (model))
5425 {
5426 warning (OPT_Winvalid_memory_model,
5427 "invalid memory model for %<__atomic_load%>");
5428 model = MEMMODEL_SEQ_CST;
5429 }
5430
5431 if (!flag_inline_atomics)
5432 return NULL_RTX;
5433
5434 /* Expand the operand. */
5435 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5436
5437 return expand_atomic_load (target, mem, model);
5438 }
5439
5440
5441 /* Expand the __atomic_store intrinsic:
5442 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5443 EXP is the CALL_EXPR. */
5445
5446 static rtx
5447 expand_builtin_atomic_store (machine_mode mode, tree exp)
5448 {
5449 rtx mem, val;
5450 enum memmodel model;
5451
5452 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5453 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
5454 || is_mm_release (model)))
5455 {
5456 warning (OPT_Winvalid_memory_model,
5457 "invalid memory model for %<__atomic_store%>");
5458 model = MEMMODEL_SEQ_CST;
5459 }
5460
5461 if (!flag_inline_atomics)
5462 return NULL_RTX;
5463
5464 /* Expand the operands. */
5465 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5466 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5467
5468 return expand_atomic_store (mem, val, model, false);
5469 }
5470
5471 /* Expand the __atomic_fetch_XXX intrinsic:
5472 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5473 EXP is the CALL_EXPR.
5474 TARGET is an optional place for us to store the results.
5475 CODE is the operation: PLUS, MINUS, AND, XOR, IOR, or NOT (meaning NAND).
5476 FETCH_AFTER is true if the result of the operation is returned,
5477 false if the value before the operation is returned.
5478 IGNORE is true if the result is not used.
5479 EXT_CALL is the correct builtin for an external call if this cannot be
5480 resolved to an instruction sequence. */
5481
5482 static rtx
5483 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
5484 enum rtx_code code, bool fetch_after,
5485 bool ignore, enum built_in_function ext_call)
5486 {
5487 rtx val, mem, ret;
5488 enum memmodel model;
5489 tree fndecl;
5490 tree addr;
5491
5492 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5493
5494 /* Expand the operands. */
5495 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5496 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5497
5498 /* Only try generating instructions if inlining is turned on. */
5499 if (flag_inline_atomics)
5500 {
5501 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5502 if (ret)
5503 return ret;
5504 }
5505
5506 /* Return if a different routine isn't needed for the library call. */
5507 if (ext_call == BUILT_IN_NONE)
5508 return NULL_RTX;
5509
5510 /* Change the call to the specified function. */
5511 fndecl = get_callee_fndecl (exp);
5512 addr = CALL_EXPR_FN (exp);
5513 STRIP_NOPS (addr);
5514
5515 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5516 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
5517
5518 /* Expand the call here so we can emit trailing code. */
5519 ret = expand_call (exp, target, ignore);
5520
5521 /* Replace the original function just in case it matters. */
5522 TREE_OPERAND (addr, 0) = fndecl;
5523
5524 /* Then issue the arithmetic correction to return the right result. */
5525 if (!ignore)
5526 {
5527 if (code == NOT)
5528 {
5529 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5530 OPTAB_LIB_WIDEN);
5531 ret = expand_simple_unop (mode, NOT, ret, target, true);
5532 }
5533 else
5534 ret = expand_simple_binop (mode, code, ret, val, target, true,
5535 OPTAB_LIB_WIDEN);
5536 }
5537 return ret;
5538 }
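/* As a sketch of the correction above: when only the fetch-before
   library routine exists, an add-fetch call is rebuilt from its result
   roughly as

       ret = __atomic_fetch_add (obj, val, model) + val;

   and a NAND (CODE == NOT) as ret = ~(ret & val), matching the GCC 4.4
   NAND semantics.  */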
5539
5540 /* Expand an atomic clear operation.
5541 void __atomic_clear (BOOL *obj, enum memmodel)
5542 EXP is the call expression. */
5543
5544 static rtx
5545 expand_builtin_atomic_clear (tree exp)
5546 {
5547 machine_mode mode;
5548 rtx mem, ret;
5549 enum memmodel model;
5550
5551 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5552 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5553 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5554
5555 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
5556 {
5557 warning (OPT_Winvalid_memory_model,
5558 "invalid memory model for %<__atomic_clear%>");
5559 model = MEMMODEL_SEQ_CST;
5560 }
5561
5562 /* Try issuing an __atomic_store, allowing a fallback to __sync_lock_release.
5563 The only way this can fail is if the bool type is larger than a word size,
5564 in which case a plain store is emitted below. Unlikely, but handle it
5565 anyway for completeness. Assume a single threaded model, since there is
5566 no atomic support in that case and no barriers are required. */
5567 ret = expand_atomic_store (mem, const0_rtx, model, true);
5568 if (!ret)
5569 emit_move_insn (mem, const0_rtx);
5570 return const0_rtx;
5571 }
5572
5573 /* Expand an atomic test_and_set operation.
5574 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
5575 EXP is the call expression. */
5576
5577 static rtx
5578 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5579 {
5580 rtx mem;
5581 enum memmodel model;
5582 machine_mode mode;
5583
5584 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5585 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5586 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5587
5588 return expand_atomic_test_and_set (target, mem, model);
5589 }
5590
5591
5592 /* Return true if an object of size ARG0 at (optional) address ARG1 is always lock
5593 free on this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5594
5595 static tree
5596 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5597 {
5598 int size;
5599 machine_mode mode;
5600 unsigned int mode_align, type_align;
5601
5602 if (TREE_CODE (arg0) != INTEGER_CST)
5603 return NULL_TREE;
5604
5605 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5606 mode = mode_for_size (size, MODE_INT, 0);
5607 mode_align = GET_MODE_ALIGNMENT (mode);
5608
5609 if (TREE_CODE (arg1) == INTEGER_CST)
5610 {
5611 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
5612
5613 /* Either this argument is null, or it's a fake pointer encoding
5614 the alignment of the object. */
5615 val = val & -val;
5616 val *= BITS_PER_UNIT;
5617
5618 if (val == 0 || mode_align < val)
5619 type_align = mode_align;
5620 else
5621 type_align = val;
5622 }
5623 else
5624 {
5625 tree ttype = TREE_TYPE (arg1);
5626
5627 /* This function is usually invoked and folded immediately by the front
5628 end before anything else has a chance to look at it. The pointer
5629 parameter at this point is usually cast to a void *, so check for that
5630 and look past the cast. */
5631 if (CONVERT_EXPR_P (arg1) && POINTER_TYPE_P (ttype)
5632 && VOID_TYPE_P (TREE_TYPE (ttype)))
5633 arg1 = TREE_OPERAND (arg1, 0);
5634
5635 ttype = TREE_TYPE (arg1);
5636 gcc_assert (POINTER_TYPE_P (ttype));
5637
5638 /* Get the underlying type of the object. */
5639 ttype = TREE_TYPE (ttype);
5640 type_align = TYPE_ALIGN (ttype);
5641 }
5642
5643 /* If the object has smaller alignment, the lock free routines cannot
5644 be used. */
5645 if (type_align < mode_align)
5646 return boolean_false_node;
5647
5648 /* Check if a compare_and_swap pattern exists for the mode which represents
5649 the required size. The pattern is not allowed to fail, so the existence
5650 of the pattern indicates support is present. */
5651 if (can_compare_and_swap_p (mode, true))
5652 return boolean_true_node;
5653 else
5654 return boolean_false_node;
5655 }
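/* For example, on a target with a 32-bit compare-and-swap pattern

       __atomic_always_lock_free (4, 0)

   folds to true: ARG1 == 0 carries no alignment information, so
   type_align defaults to mode_align and can_compare_and_swap_p decides
   the answer.  */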
5656
5657 /* Return true if the parameters to call EXP represent an object which will
5658 always generate lock free instructions. The first argument represents the
5659 size of the object, and the second parameter is a pointer to the object
5660 itself. If NULL is passed for the object, then the result is based on
5661 typical alignment for an object of the specified size. Otherwise return
5662 false. */
5663
5664 static rtx
5665 expand_builtin_atomic_always_lock_free (tree exp)
5666 {
5667 tree size;
5668 tree arg0 = CALL_EXPR_ARG (exp, 0);
5669 tree arg1 = CALL_EXPR_ARG (exp, 1);
5670
5671 if (TREE_CODE (arg0) != INTEGER_CST)
5672 {
5673 error ("non-constant argument 1 to __atomic_always_lock_free");
5674 return const0_rtx;
5675 }
5676
5677 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5678 if (size == boolean_true_node)
5679 return const1_rtx;
5680 return const0_rtx;
5681 }
5682
5683 /* Return boolean_true_node if it can be determined that an object of size ARG0
5684 at address ARG1 is lock free on this architecture; otherwise NULL_TREE. */
5685
5686 static tree
5687 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5688 {
5689 if (!flag_inline_atomics)
5690 return NULL_TREE;
5691
5692 /* If it isn't always lock free, don't generate a result. */
5693 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5694 return boolean_true_node;
5695
5696 return NULL_TREE;
5697 }
5698
5699 /* Return true if the parameters to call EXP represent an object which will
5700 always generate lock free instructions. The first argument represents the
5701 size of the object, and the second parameter is a pointer to the object
5702 itself. If NULL is passed for the object, then the result is based on
5703 typical alignment for an object of the specified size. Otherwise return
5704 NULL_RTX. */
5705
5706 static rtx
5707 expand_builtin_atomic_is_lock_free (tree exp)
5708 {
5709 tree size;
5710 tree arg0 = CALL_EXPR_ARG (exp, 0);
5711 tree arg1 = CALL_EXPR_ARG (exp, 1);
5712
5713 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5714 {
5715 error ("non-integer argument 1 to __atomic_is_lock_free");
5716 return NULL_RTX;
5717 }
5718
5719 if (!flag_inline_atomics)
5720 return NULL_RTX;
5721
5722 /* If the value is known at compile time, return the RTX for it. */
5723 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5724 if (size == boolean_true_node)
5725 return const1_rtx;
5726
5727 return NULL_RTX;
5728 }
5729
5730 /* Expand the __atomic_thread_fence intrinsic:
5731 void __atomic_thread_fence (enum memmodel)
5732 EXP is the CALL_EXPR. */
5733
5734 static void
5735 expand_builtin_atomic_thread_fence (tree exp)
5736 {
5737 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5738 expand_mem_thread_fence (model);
5739 }
5740
5741 /* Expand the __atomic_signal_fence intrinsic:
5742 void __atomic_signal_fence (enum memmodel)
5743 EXP is the CALL_EXPR. */
5744
5745 static void
5746 expand_builtin_atomic_signal_fence (tree exp)
5747 {
5748 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5749 expand_mem_signal_fence (model);
5750 }
5751
5752 /* Expand the __sync_synchronize intrinsic. */
5753
5754 static void
5755 expand_builtin_sync_synchronize (void)
5756 {
5757 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
5758 }
5759
5760 static rtx
5761 expand_builtin_thread_pointer (tree exp, rtx target)
5762 {
5763 enum insn_code icode;
5764 if (!validate_arglist (exp, VOID_TYPE))
5765 return const0_rtx;
5766 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5767 if (icode != CODE_FOR_nothing)
5768 {
5769 struct expand_operand op;
5770 /* If the target is not suitable then create a new one. */
5771 if (target == NULL_RTX
5772 || !REG_P (target)
5773 || GET_MODE (target) != Pmode)
5774 target = gen_reg_rtx (Pmode);
5775 create_output_operand (&op, target, Pmode);
5776 expand_insn (icode, 1, &op);
5777 return target;
5778 }
5779 error ("__builtin_thread_pointer is not supported on this target");
5780 return const0_rtx;
5781 }
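/* This expands, e.g.,

       void *tp = __builtin_thread_pointer ();

   directly to the target's thread-pointer pattern (typically a read of
   the TLS base register) when one exists, and reports an error
   otherwise.  */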
5782
5783 static void
5784 expand_builtin_set_thread_pointer (tree exp)
5785 {
5786 enum insn_code icode;
5787 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5788 return;
5789 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5790 if (icode != CODE_FOR_nothing)
5791 {
5792 struct expand_operand op;
5793 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5794 Pmode, EXPAND_NORMAL);
5795 create_input_operand (&op, val, Pmode);
5796 expand_insn (icode, 1, &op);
5797 return;
5798 }
5799 error ("__builtin_set_thread_pointer is not supported on this target");
5800 }
5801
5802 \f
5803 /* Emit code to restore the current value of the stack. */
5804
5805 static void
5806 expand_stack_restore (tree var)
5807 {
5808 rtx_insn *prev;
5809 rtx sa = expand_normal (var);
5810
5811 sa = convert_memory_address (Pmode, sa);
5812
5813 prev = get_last_insn ();
5814 emit_stack_restore (SAVE_BLOCK, sa);
5815
5816 record_new_stack_level ();
5817
5818 fixup_args_size_notes (prev, get_last_insn (), 0);
5819 }
5820
5821 /* Emit code to save the current value of the stack. */
5822
5823 static rtx
5824 expand_stack_save (void)
5825 {
5826 rtx ret = NULL_RTX;
5827
5828 emit_stack_save (SAVE_BLOCK, &ret);
5829 return ret;
5830 }
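/* These two implement the BUILT_IN_STACK_SAVE / BUILT_IN_STACK_RESTORE
   pair the gimplifier wraps around variable-length-array scopes,
   conceptually

       void *sp = stack_save ();
       { int vla[n]; ... }
       stack_restore (sp);

   so that the space for VLAs is reclaimed on scope exit.  */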
5831
5832
5833 /* Expand an expression EXP that calls a built-in function,
5834 with result going to TARGET if that's convenient
5835 (and in mode MODE if that's convenient).
5836 SUBTARGET may be used as the target for computing one of EXP's operands.
5837 IGNORE is nonzero if the value is to be ignored. */
5838
5839 rtx
5840 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
5841 int ignore)
5842 {
5843 tree fndecl = get_callee_fndecl (exp);
5844 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5845 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5846 int flags;
5847
5848 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5849 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5850
5851 /* When ASan is enabled, we don't want to expand some memory/string
5852 builtins and rely on libsanitizer's hooks. This allows us to avoid
5853 redundant checks and be sure that possible overflows will be detected
5854 by ASan. */
5855
5856 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
5857 return expand_call (exp, target, ignore);
5858
5859 /* When not optimizing, generate calls to library functions for a certain
5860 set of builtins. */
5861 if (!optimize
5862 && !called_as_built_in (fndecl)
5863 && fcode != BUILT_IN_FORK
5864 && fcode != BUILT_IN_EXECL
5865 && fcode != BUILT_IN_EXECV
5866 && fcode != BUILT_IN_EXECLP
5867 && fcode != BUILT_IN_EXECLE
5868 && fcode != BUILT_IN_EXECVP
5869 && fcode != BUILT_IN_EXECVE
5870 && fcode != BUILT_IN_ALLOCA
5871 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5872 && fcode != BUILT_IN_FREE
5873 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
5874 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
5875 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
5876 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
5877 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
5878 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
5879 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
5880 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
5881 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
5882 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
5883 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
5884 && fcode != BUILT_IN_CHKP_BNDRET)
5885 return expand_call (exp, target, ignore);
5886
5887 /* The built-in function expanders test for target == const0_rtx
5888 to determine whether the function's result will be ignored. */
5889 if (ignore)
5890 target = const0_rtx;
5891
5892 /* If the result of a pure or const built-in function is ignored, and
5893 none of its arguments are volatile, we can avoid expanding the
5894 built-in call and just evaluate the arguments for side-effects. */
5895 if (target == const0_rtx
5896 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5897 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5898 {
5899 bool volatilep = false;
5900 tree arg;
5901 call_expr_arg_iterator iter;
5902
5903 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5904 if (TREE_THIS_VOLATILE (arg))
5905 {
5906 volatilep = true;
5907 break;
5908 }
5909
5910 if (! volatilep)
5911 {
5912 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5913 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5914 return const0_rtx;
5915 }
5916 }
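  /* For example, a call to the pure function strlen whose result is
     unused,

         strlen (p++);

     keeps only the side effect of evaluating P++; each argument is
     expanded for effect above and the call itself is dropped.  */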
5917
5918 /* expand_builtin_with_bounds is supposed to be used for
5919 instrumented builtin calls. */
5920 gcc_assert (!CALL_WITH_BOUNDS_P (exp));
5921
5922 switch (fcode)
5923 {
5924 CASE_FLT_FN (BUILT_IN_FABS):
5925 case BUILT_IN_FABSD32:
5926 case BUILT_IN_FABSD64:
5927 case BUILT_IN_FABSD128:
5928 target = expand_builtin_fabs (exp, target, subtarget);
5929 if (target)
5930 return target;
5931 break;
5932
5933 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5934 target = expand_builtin_copysign (exp, target, subtarget);
5935 if (target)
5936 return target;
5937 break;
5938
5939 /* Just do a normal library call if we were unable to fold
5940 the values. */
5941 CASE_FLT_FN (BUILT_IN_CABS):
5942 break;
5943
5944 CASE_FLT_FN (BUILT_IN_EXP):
5945 CASE_FLT_FN (BUILT_IN_EXP10):
5946 CASE_FLT_FN (BUILT_IN_POW10):
5947 CASE_FLT_FN (BUILT_IN_EXP2):
5948 CASE_FLT_FN (BUILT_IN_EXPM1):
5949 CASE_FLT_FN (BUILT_IN_LOGB):
5950 CASE_FLT_FN (BUILT_IN_LOG):
5951 CASE_FLT_FN (BUILT_IN_LOG10):
5952 CASE_FLT_FN (BUILT_IN_LOG2):
5953 CASE_FLT_FN (BUILT_IN_LOG1P):
5954 CASE_FLT_FN (BUILT_IN_TAN):
5955 CASE_FLT_FN (BUILT_IN_ASIN):
5956 CASE_FLT_FN (BUILT_IN_ACOS):
5957 CASE_FLT_FN (BUILT_IN_ATAN):
5958 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5959 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5960 because of possible accuracy problems. */
5961 if (! flag_unsafe_math_optimizations)
5962 break;
5963 CASE_FLT_FN (BUILT_IN_SQRT):
5964 CASE_FLT_FN (BUILT_IN_FLOOR):
5965 CASE_FLT_FN (BUILT_IN_CEIL):
5966 CASE_FLT_FN (BUILT_IN_TRUNC):
5967 CASE_FLT_FN (BUILT_IN_ROUND):
5968 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5969 CASE_FLT_FN (BUILT_IN_RINT):
5970 target = expand_builtin_mathfn (exp, target, subtarget);
5971 if (target)
5972 return target;
5973 break;
5974
5975 CASE_FLT_FN (BUILT_IN_FMA):
5976 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
5977 if (target)
5978 return target;
5979 break;
5980
5981 CASE_FLT_FN (BUILT_IN_ILOGB):
5982 if (! flag_unsafe_math_optimizations)
5983 break;
5984 CASE_FLT_FN (BUILT_IN_ISINF):
5985 CASE_FLT_FN (BUILT_IN_FINITE):
5986 case BUILT_IN_ISFINITE:
5987 case BUILT_IN_ISNORMAL:
5988 target = expand_builtin_interclass_mathfn (exp, target);
5989 if (target)
5990 return target;
5991 break;
5992
5993 CASE_FLT_FN (BUILT_IN_ICEIL):
5994 CASE_FLT_FN (BUILT_IN_LCEIL):
5995 CASE_FLT_FN (BUILT_IN_LLCEIL):
5996 CASE_FLT_FN (BUILT_IN_LFLOOR):
5997 CASE_FLT_FN (BUILT_IN_IFLOOR):
5998 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5999 target = expand_builtin_int_roundingfn (exp, target);
6000 if (target)
6001 return target;
6002 break;
6003
6004 CASE_FLT_FN (BUILT_IN_IRINT):
6005 CASE_FLT_FN (BUILT_IN_LRINT):
6006 CASE_FLT_FN (BUILT_IN_LLRINT):
6007 CASE_FLT_FN (BUILT_IN_IROUND):
6008 CASE_FLT_FN (BUILT_IN_LROUND):
6009 CASE_FLT_FN (BUILT_IN_LLROUND):
6010 target = expand_builtin_int_roundingfn_2 (exp, target);
6011 if (target)
6012 return target;
6013 break;
6014
6015 CASE_FLT_FN (BUILT_IN_POWI):
6016 target = expand_builtin_powi (exp, target);
6017 if (target)
6018 return target;
6019 break;
6020
6021 CASE_FLT_FN (BUILT_IN_ATAN2):
6022 CASE_FLT_FN (BUILT_IN_LDEXP):
6023 CASE_FLT_FN (BUILT_IN_SCALB):
6024 CASE_FLT_FN (BUILT_IN_SCALBN):
6025 CASE_FLT_FN (BUILT_IN_SCALBLN):
6026 if (! flag_unsafe_math_optimizations)
6027 break;
6028
6029 CASE_FLT_FN (BUILT_IN_FMOD):
6030 CASE_FLT_FN (BUILT_IN_REMAINDER):
6031 CASE_FLT_FN (BUILT_IN_DREM):
6032 CASE_FLT_FN (BUILT_IN_POW):
6033 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6034 if (target)
6035 return target;
6036 break;
6037
6038 CASE_FLT_FN (BUILT_IN_CEXPI):
6039 target = expand_builtin_cexpi (exp, target);
6040 gcc_assert (target);
6041 return target;
6042
6043 CASE_FLT_FN (BUILT_IN_SIN):
6044 CASE_FLT_FN (BUILT_IN_COS):
6045 if (! flag_unsafe_math_optimizations)
6046 break;
6047 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6048 if (target)
6049 return target;
6050 break;
6051
6052 CASE_FLT_FN (BUILT_IN_SINCOS):
6053 if (! flag_unsafe_math_optimizations)
6054 break;
6055 target = expand_builtin_sincos (exp);
6056 if (target)
6057 return target;
6058 break;
6059
6060 case BUILT_IN_APPLY_ARGS:
6061 return expand_builtin_apply_args ();
6062
6063 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6064 FUNCTION with a copy of the parameters described by
6065 ARGUMENTS, and ARGSIZE. It returns a block of memory
6066 allocated on the stack into which is stored all the registers
6067 that might possibly be used for returning the result of a
6068 function. ARGUMENTS is the value returned by
6069 __builtin_apply_args. ARGSIZE is the number of bytes of
6070 arguments that must be copied. ??? How should this value be
6071 computed? We'll also need a safe worst case value for varargs
6072 functions. */
6073 case BUILT_IN_APPLY:
6074 if (!validate_arglist (exp, POINTER_TYPE,
6075 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6076 && !validate_arglist (exp, REFERENCE_TYPE,
6077 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6078 return const0_rtx;
6079 else
6080 {
6081 rtx ops[3];
6082
6083 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6084 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6085 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6086
6087 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6088 }
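    /* A sketch of the forwarding idiom these builtins support:

           void *args = __builtin_apply_args ();
           void *res = __builtin_apply ((void (*)()) fn, args, 64);
           __builtin_return (res);

       where 64 stands in for a caller-chosen ARGSIZE; as noted above,
       there is no fully safe way to compute it for varargs.  */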
6089
6090 /* __builtin_return (RESULT) causes the function to return the
6091 value described by RESULT. RESULT is address of the block of
6092 memory returned by __builtin_apply. */
6093 case BUILT_IN_RETURN:
6094 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6095 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6096 return const0_rtx;
6097
6098 case BUILT_IN_SAVEREGS:
6099 return expand_builtin_saveregs ();
6100
6101 case BUILT_IN_VA_ARG_PACK:
6102 /* All valid uses of __builtin_va_arg_pack () are removed during
6103 inlining. */
6104 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6105 return const0_rtx;
6106
6107 case BUILT_IN_VA_ARG_PACK_LEN:
6108 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6109 inlining. */
6110 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6111 return const0_rtx;
6112
6113 /* Return the address of the first anonymous stack arg. */
6114 case BUILT_IN_NEXT_ARG:
6115 if (fold_builtin_next_arg (exp, false))
6116 return const0_rtx;
6117 return expand_builtin_next_arg ();
6118
6119 case BUILT_IN_CLEAR_CACHE:
6120 target = expand_builtin___clear_cache (exp);
6121 if (target)
6122 return target;
6123 break;
6124
6125 case BUILT_IN_CLASSIFY_TYPE:
6126 return expand_builtin_classify_type (exp);
6127
6128 case BUILT_IN_CONSTANT_P:
6129 return const0_rtx;
6130
6131 case BUILT_IN_FRAME_ADDRESS:
6132 case BUILT_IN_RETURN_ADDRESS:
6133 return expand_builtin_frame_address (fndecl, exp);
6134
6135 /* Returns the address of the area where the structure is returned,
6136 or 0 otherwise. */
6137 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6138 if (call_expr_nargs (exp) != 0
6139 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6140 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6141 return const0_rtx;
6142 else
6143 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6144
6145 case BUILT_IN_ALLOCA:
6146 case BUILT_IN_ALLOCA_WITH_ALIGN:
6147 /* If the allocation stems from the declaration of a variable-sized
6148 object, it cannot accumulate. */
6149 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6150 if (target)
6151 return target;
6152 break;
6153
6154 case BUILT_IN_STACK_SAVE:
6155 return expand_stack_save ();
6156
6157 case BUILT_IN_STACK_RESTORE:
6158 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6159 return const0_rtx;
6160
6161 case BUILT_IN_BSWAP16:
6162 case BUILT_IN_BSWAP32:
6163 case BUILT_IN_BSWAP64:
6164 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6165 if (target)
6166 return target;
6167 break;
6168
6169 CASE_INT_FN (BUILT_IN_FFS):
6170 target = expand_builtin_unop (target_mode, exp, target,
6171 subtarget, ffs_optab);
6172 if (target)
6173 return target;
6174 break;
6175
6176 CASE_INT_FN (BUILT_IN_CLZ):
6177 target = expand_builtin_unop (target_mode, exp, target,
6178 subtarget, clz_optab);
6179 if (target)
6180 return target;
6181 break;
6182
6183 CASE_INT_FN (BUILT_IN_CTZ):
6184 target = expand_builtin_unop (target_mode, exp, target,
6185 subtarget, ctz_optab);
6186 if (target)
6187 return target;
6188 break;
6189
6190 CASE_INT_FN (BUILT_IN_CLRSB):
6191 target = expand_builtin_unop (target_mode, exp, target,
6192 subtarget, clrsb_optab);
6193 if (target)
6194 return target;
6195 break;
6196
6197 CASE_INT_FN (BUILT_IN_POPCOUNT):
6198 target = expand_builtin_unop (target_mode, exp, target,
6199 subtarget, popcount_optab);
6200 if (target)
6201 return target;
6202 break;
6203
6204 CASE_INT_FN (BUILT_IN_PARITY):
6205 target = expand_builtin_unop (target_mode, exp, target,
6206 subtarget, parity_optab);
6207 if (target)
6208 return target;
6209 break;
6210
6211 case BUILT_IN_STRLEN:
6212 target = expand_builtin_strlen (exp, target, target_mode);
6213 if (target)
6214 return target;
6215 break;
6216
6217 case BUILT_IN_STRCPY:
6218 target = expand_builtin_strcpy (exp, target);
6219 if (target)
6220 return target;
6221 break;
6222
6223 case BUILT_IN_STRNCPY:
6224 target = expand_builtin_strncpy (exp, target);
6225 if (target)
6226 return target;
6227 break;
6228
6229 case BUILT_IN_STPCPY:
6230 target = expand_builtin_stpcpy (exp, target, mode);
6231 if (target)
6232 return target;
6233 break;
6234
6235 case BUILT_IN_MEMCPY:
6236 target = expand_builtin_memcpy (exp, target);
6237 if (target)
6238 return target;
6239 break;
6240
6241 case BUILT_IN_MEMPCPY:
6242 target = expand_builtin_mempcpy (exp, target, mode);
6243 if (target)
6244 return target;
6245 break;
6246
6247 case BUILT_IN_MEMSET:
6248 target = expand_builtin_memset (exp, target, mode);
6249 if (target)
6250 return target;
6251 break;
6252
6253 case BUILT_IN_BZERO:
6254 target = expand_builtin_bzero (exp);
6255 if (target)
6256 return target;
6257 break;
6258
6259 case BUILT_IN_STRCMP:
6260 target = expand_builtin_strcmp (exp, target);
6261 if (target)
6262 return target;
6263 break;
6264
6265 case BUILT_IN_STRNCMP:
6266 target = expand_builtin_strncmp (exp, target, mode);
6267 if (target)
6268 return target;
6269 break;
6270
6271 case BUILT_IN_BCMP:
6272 case BUILT_IN_MEMCMP:
6273 target = expand_builtin_memcmp (exp, target);
6274 if (target)
6275 return target;
6276 break;
6277
6278 case BUILT_IN_SETJMP:
6279 /* This should have been lowered to the builtins below. */
6280 gcc_unreachable ();
6281
6282 case BUILT_IN_SETJMP_SETUP:
6283 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6284 and the receiver label. */
6285 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6286 {
6287 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6288 VOIDmode, EXPAND_NORMAL);
6289 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6290 rtx_insn *label_r = label_rtx (label);
6291
6292 /* This is copied from the handling of non-local gotos. */
6293 expand_builtin_setjmp_setup (buf_addr, label_r);
6294 nonlocal_goto_handler_labels
6295 = gen_rtx_INSN_LIST (VOIDmode, label_r,
6296 nonlocal_goto_handler_labels);
6297 /* ??? Do not let expand_label treat us as such since we would
6298 not want to be both on the list of non-local labels and on
6299 the list of forced labels. */
6300 FORCED_LABEL (label) = 0;
6301 return const0_rtx;
6302 }
6303 break;
6304
6305 case BUILT_IN_SETJMP_RECEIVER:
6306 /* __builtin_setjmp_receiver is passed the receiver label. */
6307 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6308 {
6309 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6310 rtx_insn *label_r = label_rtx (label);
6311
6312 expand_builtin_setjmp_receiver (label_r);
6313 return const0_rtx;
6314 }
6315 break;
6316
6317 /* __builtin_longjmp is passed a pointer to an array of five words.
6318 It's similar to the C library longjmp function but works with
6319 __builtin_setjmp above. */
6320 case BUILT_IN_LONGJMP:
6321 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6322 {
6323 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6324 VOIDmode, EXPAND_NORMAL);
6325 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6326
6327 if (value != const1_rtx)
6328 {
6329 error ("%<__builtin_longjmp%> second argument must be 1");
6330 return const0_rtx;
6331 }
6332
6333 expand_builtin_longjmp (buf_addr, value);
6334 return const0_rtx;
6335 }
6336 break;
6337
6338 case BUILT_IN_NONLOCAL_GOTO:
6339 target = expand_builtin_nonlocal_goto (exp);
6340 if (target)
6341 return target;
6342 break;
6343
6344 /* This updates the setjmp buffer that is its argument with the value
6345 of the current stack pointer. */
6346 case BUILT_IN_UPDATE_SETJMP_BUF:
6347 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6348 {
6349 rtx buf_addr
6350 = expand_normal (CALL_EXPR_ARG (exp, 0));
6351
6352 expand_builtin_update_setjmp_buf (buf_addr);
6353 return const0_rtx;
6354 }
6355 break;
6356
6357 case BUILT_IN_TRAP:
6358 expand_builtin_trap ();
6359 return const0_rtx;
6360
6361 case BUILT_IN_UNREACHABLE:
6362 expand_builtin_unreachable ();
6363 return const0_rtx;
6364
6365 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6366 case BUILT_IN_SIGNBITD32:
6367 case BUILT_IN_SIGNBITD64:
6368 case BUILT_IN_SIGNBITD128:
6369 target = expand_builtin_signbit (exp, target);
6370 if (target)
6371 return target;
6372 break;
6373
6374 /* Various hooks for the DWARF 2 __throw routine. */
6375 case BUILT_IN_UNWIND_INIT:
6376 expand_builtin_unwind_init ();
6377 return const0_rtx;
6378 case BUILT_IN_DWARF_CFA:
6379 return virtual_cfa_rtx;
6380 #ifdef DWARF2_UNWIND_INFO
6381 case BUILT_IN_DWARF_SP_COLUMN:
6382 return expand_builtin_dwarf_sp_column ();
6383 case BUILT_IN_INIT_DWARF_REG_SIZES:
6384 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6385 return const0_rtx;
6386 #endif
6387 case BUILT_IN_FROB_RETURN_ADDR:
6388 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6389 case BUILT_IN_EXTRACT_RETURN_ADDR:
6390 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6391 case BUILT_IN_EH_RETURN:
6392 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6393 CALL_EXPR_ARG (exp, 1));
6394 return const0_rtx;
6395 case BUILT_IN_EH_RETURN_DATA_REGNO:
6396 return expand_builtin_eh_return_data_regno (exp);
6397 case BUILT_IN_EXTEND_POINTER:
6398 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6399 case BUILT_IN_EH_POINTER:
6400 return expand_builtin_eh_pointer (exp);
6401 case BUILT_IN_EH_FILTER:
6402 return expand_builtin_eh_filter (exp);
6403 case BUILT_IN_EH_COPY_VALUES:
6404 return expand_builtin_eh_copy_values (exp);
6405
6406 case BUILT_IN_VA_START:
6407 return expand_builtin_va_start (exp);
6408 case BUILT_IN_VA_END:
6409 return expand_builtin_va_end (exp);
6410 case BUILT_IN_VA_COPY:
6411 return expand_builtin_va_copy (exp);
6412 case BUILT_IN_EXPECT:
6413 return expand_builtin_expect (exp, target);
6414 case BUILT_IN_ASSUME_ALIGNED:
6415 return expand_builtin_assume_aligned (exp, target);
6416 case BUILT_IN_PREFETCH:
6417 expand_builtin_prefetch (exp);
6418 return const0_rtx;
6419
6420 case BUILT_IN_INIT_TRAMPOLINE:
6421 return expand_builtin_init_trampoline (exp, true);
6422 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6423 return expand_builtin_init_trampoline (exp, false);
6424 case BUILT_IN_ADJUST_TRAMPOLINE:
6425 return expand_builtin_adjust_trampoline (exp);
6426
6427 case BUILT_IN_FORK:
6428 case BUILT_IN_EXECL:
6429 case BUILT_IN_EXECV:
6430 case BUILT_IN_EXECLP:
6431 case BUILT_IN_EXECLE:
6432 case BUILT_IN_EXECVP:
6433 case BUILT_IN_EXECVE:
6434 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6435 if (target)
6436 return target;
6437 break;
6438
6439 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6440 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6441 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6442 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6443 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6444 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6445 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6446 if (target)
6447 return target;
6448 break;
6449
6450 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6451 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6452 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6453 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6454 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6455 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6456 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6457 if (target)
6458 return target;
6459 break;
6460
6461 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6462 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6463 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6464 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6465 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6466 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6467 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6468 if (target)
6469 return target;
6470 break;
6471
6472 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6473 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6474 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6475 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6476 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6477 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6478 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6479 if (target)
6480 return target;
6481 break;
6482
6483 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6484 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6485 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6486 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6487 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6488 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6489 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6490 if (target)
6491 return target;
6492 break;
6493
6494 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6495 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6496 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6497 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6498 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6499 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6500 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6501 if (target)
6502 return target;
6503 break;
6504
6505 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6506 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6507 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6508 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6509 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6510 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6511 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6512 if (target)
6513 return target;
6514 break;
6515
6516 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6517 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6518 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6519 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6520 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6521 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6522 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6523 if (target)
6524 return target;
6525 break;
6526
6527 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6528 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6529 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6530 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6531 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6532 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6533 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6534 if (target)
6535 return target;
6536 break;
6537
6538 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6539 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6540 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6541 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6542 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6543 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6544 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6545 if (target)
6546 return target;
6547 break;
6548
6549 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6550 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6551 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6552 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6553 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6554 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6555 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6556 if (target)
6557 return target;
6558 break;
6559
6560 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6561 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6562 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6563 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6564 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6565 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6566 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6567 if (target)
6568 return target;
6569 break;
6570
6571 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6572 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6573 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6574 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6575 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6576 if (mode == VOIDmode)
6577 mode = TYPE_MODE (boolean_type_node);
6578 if (!target || !register_operand (target, mode))
6579 target = gen_reg_rtx (mode);
6580
6581 mode = get_builtin_sync_mode
6582 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6583 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6584 if (target)
6585 return target;
6586 break;
6587
6588 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6589 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6590 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6591 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6592 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6593 mode = get_builtin_sync_mode
6594 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6595 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6596 if (target)
6597 return target;
6598 break;
6599
6600 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6601 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6602 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6603 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6604 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6605 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6606 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6607 if (target)
6608 return target;
6609 break;
6610
6611 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6612 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6613 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6614 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6615 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6616 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6617 expand_builtin_sync_lock_release (mode, exp);
6618 return const0_rtx;
6619
6620 case BUILT_IN_SYNC_SYNCHRONIZE:
6621 expand_builtin_sync_synchronize ();
6622 return const0_rtx;
6623
6624 case BUILT_IN_ATOMIC_EXCHANGE_1:
6625 case BUILT_IN_ATOMIC_EXCHANGE_2:
6626 case BUILT_IN_ATOMIC_EXCHANGE_4:
6627 case BUILT_IN_ATOMIC_EXCHANGE_8:
6628 case BUILT_IN_ATOMIC_EXCHANGE_16:
6629 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6630 target = expand_builtin_atomic_exchange (mode, exp, target);
6631 if (target)
6632 return target;
6633 break;
6634
6635 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6636 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6637 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6638 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6639 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6640 {
6641 unsigned int nargs, z;
6642 vec<tree, va_gc> *vec;
6643
6644 mode = get_builtin_sync_mode
6645 (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6646 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6647 if (target)
6648 return target;
6649
6650 /* If this is turned into an external library call, the weak parameter
6651 must be dropped to match the expected parameter list. */
6652 nargs = call_expr_nargs (exp);
6653 vec_alloc (vec, nargs - 1);
6654 for (z = 0; z < 3; z++)
6655 vec->quick_push (CALL_EXPR_ARG (exp, z));
6656 /* Skip the boolean weak parameter. */
6657 for (z = 4; z < 6; z++)
6658 vec->quick_push (CALL_EXPR_ARG (exp, z));
6659 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6660 break;
6661 }
6662
6663 case BUILT_IN_ATOMIC_LOAD_1:
6664 case BUILT_IN_ATOMIC_LOAD_2:
6665 case BUILT_IN_ATOMIC_LOAD_4:
6666 case BUILT_IN_ATOMIC_LOAD_8:
6667 case BUILT_IN_ATOMIC_LOAD_16:
6668 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6669 target = expand_builtin_atomic_load (mode, exp, target);
6670 if (target)
6671 return target;
6672 break;
6673
6674 case BUILT_IN_ATOMIC_STORE_1:
6675 case BUILT_IN_ATOMIC_STORE_2:
6676 case BUILT_IN_ATOMIC_STORE_4:
6677 case BUILT_IN_ATOMIC_STORE_8:
6678 case BUILT_IN_ATOMIC_STORE_16:
6679 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6680 target = expand_builtin_atomic_store (mode, exp);
6681 if (target)
6682 return const0_rtx;
6683 break;
6684
6685 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6686 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6687 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6688 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6689 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6690 {
6691 enum built_in_function lib;
6692 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6693 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6694 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6695 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6696 ignore, lib);
6697 if (target)
6698 return target;
6699 break;
6700 }
6701 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6702 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6703 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6704 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6705 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6706 {
6707 enum built_in_function lib;
6708 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6709 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6710 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6711 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6712 ignore, lib);
6713 if (target)
6714 return target;
6715 break;
6716 }
6717 case BUILT_IN_ATOMIC_AND_FETCH_1:
6718 case BUILT_IN_ATOMIC_AND_FETCH_2:
6719 case BUILT_IN_ATOMIC_AND_FETCH_4:
6720 case BUILT_IN_ATOMIC_AND_FETCH_8:
6721 case BUILT_IN_ATOMIC_AND_FETCH_16:
6722 {
6723 enum built_in_function lib;
6724 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6725 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6726 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6727 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6728 ignore, lib);
6729 if (target)
6730 return target;
6731 break;
6732 }
6733 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6734 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6735 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6736 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6737 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6738 {
6739 enum built_in_function lib;
6740 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6741 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6742 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6743 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6744 ignore, lib);
6745 if (target)
6746 return target;
6747 break;
6748 }
6749 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6750 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6751 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6752 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6753 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6754 {
6755 enum built_in_function lib;
6756 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6757 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6758 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6759 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6760 ignore, lib);
6761 if (target)
6762 return target;
6763 break;
6764 }
6765 case BUILT_IN_ATOMIC_OR_FETCH_1:
6766 case BUILT_IN_ATOMIC_OR_FETCH_2:
6767 case BUILT_IN_ATOMIC_OR_FETCH_4:
6768 case BUILT_IN_ATOMIC_OR_FETCH_8:
6769 case BUILT_IN_ATOMIC_OR_FETCH_16:
6770 {
6771 enum built_in_function lib;
6772 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6773 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6774 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6775 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6776 ignore, lib);
6777 if (target)
6778 return target;
6779 break;
6780 }
6781 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6782 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6783 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6784 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6785 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6786 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6787 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6788 ignore, BUILT_IN_NONE);
6789 if (target)
6790 return target;
6791 break;
6792
6793 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6794 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6795 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6796 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6797 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6798 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6799 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6800 ignore, BUILT_IN_NONE);
6801 if (target)
6802 return target;
6803 break;
6804
6805 case BUILT_IN_ATOMIC_FETCH_AND_1:
6806 case BUILT_IN_ATOMIC_FETCH_AND_2:
6807 case BUILT_IN_ATOMIC_FETCH_AND_4:
6808 case BUILT_IN_ATOMIC_FETCH_AND_8:
6809 case BUILT_IN_ATOMIC_FETCH_AND_16:
6810 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6811 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6812 ignore, BUILT_IN_NONE);
6813 if (target)
6814 return target;
6815 break;
6816
6817 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6818 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6819 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6820 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6821 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6822 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6823 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6824 ignore, BUILT_IN_NONE);
6825 if (target)
6826 return target;
6827 break;
6828
6829 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6830 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6831 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6832 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6833 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6834 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6835 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6836 ignore, BUILT_IN_NONE);
6837 if (target)
6838 return target;
6839 break;
6840
6841 case BUILT_IN_ATOMIC_FETCH_OR_1:
6842 case BUILT_IN_ATOMIC_FETCH_OR_2:
6843 case BUILT_IN_ATOMIC_FETCH_OR_4:
6844 case BUILT_IN_ATOMIC_FETCH_OR_8:
6845 case BUILT_IN_ATOMIC_FETCH_OR_16:
6846 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6847 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6848 ignore, BUILT_IN_NONE);
6849 if (target)
6850 return target;
6851 break;
6852
6853 case BUILT_IN_ATOMIC_TEST_AND_SET:
6854 return expand_builtin_atomic_test_and_set (exp, target);
6855
6856 case BUILT_IN_ATOMIC_CLEAR:
6857 return expand_builtin_atomic_clear (exp);
6858
6859 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6860 return expand_builtin_atomic_always_lock_free (exp);
6861
6862 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6863 target = expand_builtin_atomic_is_lock_free (exp);
6864 if (target)
6865 return target;
6866 break;
6867
6868 case BUILT_IN_ATOMIC_THREAD_FENCE:
6869 expand_builtin_atomic_thread_fence (exp);
6870 return const0_rtx;
6871
6872 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6873 expand_builtin_atomic_signal_fence (exp);
6874 return const0_rtx;
6875
6876 case BUILT_IN_OBJECT_SIZE:
6877 return expand_builtin_object_size (exp);
6878
6879 case BUILT_IN_MEMCPY_CHK:
6880 case BUILT_IN_MEMPCPY_CHK:
6881 case BUILT_IN_MEMMOVE_CHK:
6882 case BUILT_IN_MEMSET_CHK:
6883 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6884 if (target)
6885 return target;
6886 break;
6887
6888 case BUILT_IN_STRCPY_CHK:
6889 case BUILT_IN_STPCPY_CHK:
6890 case BUILT_IN_STRNCPY_CHK:
6891 case BUILT_IN_STPNCPY_CHK:
6892 case BUILT_IN_STRCAT_CHK:
6893 case BUILT_IN_STRNCAT_CHK:
6894 case BUILT_IN_SNPRINTF_CHK:
6895 case BUILT_IN_VSNPRINTF_CHK:
6896 maybe_emit_chk_warning (exp, fcode);
6897 break;
6898
6899 case BUILT_IN_SPRINTF_CHK:
6900 case BUILT_IN_VSPRINTF_CHK:
6901 maybe_emit_sprintf_chk_warning (exp, fcode);
6902 break;
6903
6904 case BUILT_IN_FREE:
6905 if (warn_free_nonheap_object)
6906 maybe_emit_free_warning (exp);
6907 break;
6908
6909 case BUILT_IN_THREAD_POINTER:
6910 return expand_builtin_thread_pointer (exp, target);
6911
6912 case BUILT_IN_SET_THREAD_POINTER:
6913 expand_builtin_set_thread_pointer (exp);
6914 return const0_rtx;
6915
6916 case BUILT_IN_CILK_DETACH:
6917 expand_builtin_cilk_detach (exp);
6918 return const0_rtx;
6919
6920 case BUILT_IN_CILK_POP_FRAME:
6921 expand_builtin_cilk_pop_frame (exp);
6922 return const0_rtx;
6923
6924 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
6925 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
6926 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
6927 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
6928 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
6929 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
6930 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
6931 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
6932 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
6933 case BUILT_IN_CHKP_GET_PTR_LBOUND:
6934 case BUILT_IN_CHKP_GET_PTR_UBOUND:
6935 /* We allow user CHKP builtins if Pointer Bounds
6936 Checker is off. */
6937 if (!chkp_function_instrumented_p (current_function_decl))
6938 {
6939 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
6940 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
6941 || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
6942 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
6943 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
6944 return expand_normal (CALL_EXPR_ARG (exp, 0));
6945 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
6946 return expand_normal (size_zero_node);
6947 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
6948 return expand_normal (size_int (-1));
6949 else
6950 return const0_rtx;
6951 }
6952 /* FALLTHROUGH */
6953
6954 case BUILT_IN_CHKP_BNDMK:
6955 case BUILT_IN_CHKP_BNDSTX:
6956 case BUILT_IN_CHKP_BNDCL:
6957 case BUILT_IN_CHKP_BNDCU:
6958 case BUILT_IN_CHKP_BNDLDX:
6959 case BUILT_IN_CHKP_BNDRET:
6960 case BUILT_IN_CHKP_INTERSECT:
6961 case BUILT_IN_CHKP_NARROW:
6962 case BUILT_IN_CHKP_EXTRACT_LOWER:
6963 case BUILT_IN_CHKP_EXTRACT_UPPER:
6964 /* A software implementation of Pointer Bounds Checker is not yet
6965 available. Target support is required. */
6966 error ("Your target platform does not support -fcheck-pointer-bounds");
6967 break;
6968
6969 case BUILT_IN_ACC_ON_DEVICE:
6970 /* Do a library call if we failed to expand the builtin when
6971 folding. */
6972 break;
6973
6974 default: /* Just do a library call for an unknown builtin. */
6975 break;
6976 }
6977
6978 /* The switch statement above can drop through to cause the function
6979 to be called normally. */
6980 return expand_call (exp, target, ignore);
6981 }
6982
6983 /* Similar to expand_builtin but is used for instrumented calls. */
6984
6985 rtx
6986 expand_builtin_with_bounds (tree exp, rtx target,
6987 rtx subtarget ATTRIBUTE_UNUSED,
6988 machine_mode mode, int ignore)
6989 {
6990 tree fndecl = get_callee_fndecl (exp);
6991 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6992
6993 gcc_assert (CALL_WITH_BOUNDS_P (exp));
6994
6995 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6996 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6997
6998 gcc_assert (fcode > BEGIN_CHKP_BUILTINS
6999 && fcode < END_CHKP_BUILTINS);
7000
7001 switch (fcode)
7002 {
7003 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
7004 target = expand_builtin_memcpy_with_bounds (exp, target);
7005 if (target)
7006 return target;
7007 break;
7008
7009 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
7010 target = expand_builtin_mempcpy_with_bounds (exp, target, mode);
7011 if (target)
7012 return target;
7013 break;
7014
7015 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
7016 target = expand_builtin_memset_with_bounds (exp, target, mode);
7017 if (target)
7018 return target;
7019 break;
7020
7021 default:
7022 break;
7023 }
7024
7025 /* The switch statement above can drop through to cause the function
7026 to be called normally. */
7027 return expand_call (exp, target, ignore);
7028 }
7029
7030 /* Determine whether a tree node represents a call to a built-in
7031 function. If the tree T is a call to a built-in function with
7032 the right number of arguments of the appropriate types, return
7033 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7034 Otherwise the return value is END_BUILTINS. */
7035
7036 enum built_in_function
7037 builtin_mathfn_code (const_tree t)
7038 {
7039 const_tree fndecl, arg, parmlist;
7040 const_tree argtype, parmtype;
7041 const_call_expr_arg_iterator iter;
7042
7043 if (TREE_CODE (t) != CALL_EXPR
7044 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7045 return END_BUILTINS;
7046
7047 fndecl = get_callee_fndecl (t);
7048 if (fndecl == NULL_TREE
7049 || TREE_CODE (fndecl) != FUNCTION_DECL
7050 || ! DECL_BUILT_IN (fndecl)
7051 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7052 return END_BUILTINS;
7053
7054 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7055 init_const_call_expr_arg_iterator (t, &iter);
7056 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7057 {
7058 /* If a function doesn't take a variable number of arguments,
7059 the last element in the list will have type `void'. */
7060 parmtype = TREE_VALUE (parmlist);
7061 if (VOID_TYPE_P (parmtype))
7062 {
7063 if (more_const_call_expr_args_p (&iter))
7064 return END_BUILTINS;
7065 return DECL_FUNCTION_CODE (fndecl);
7066 }
7067
7068 if (! more_const_call_expr_args_p (&iter))
7069 return END_BUILTINS;
7070
7071 arg = next_const_call_expr_arg (&iter);
7072 argtype = TREE_TYPE (arg);
7073
7074 if (SCALAR_FLOAT_TYPE_P (parmtype))
7075 {
7076 if (! SCALAR_FLOAT_TYPE_P (argtype))
7077 return END_BUILTINS;
7078 }
7079 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7080 {
7081 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7082 return END_BUILTINS;
7083 }
7084 else if (POINTER_TYPE_P (parmtype))
7085 {
7086 if (! POINTER_TYPE_P (argtype))
7087 return END_BUILTINS;
7088 }
7089 else if (INTEGRAL_TYPE_P (parmtype))
7090 {
7091 if (! INTEGRAL_TYPE_P (argtype))
7092 return END_BUILTINS;
7093 }
7094 else
7095 return END_BUILTINS;
7096 }
7097
7098 /* Variable-length argument list. */
7099 return DECL_FUNCTION_CODE (fndecl);
7100 }
7101
7102 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7103 evaluate to a constant. */
7104
7105 static tree
7106 fold_builtin_constant_p (tree arg)
7107 {
7108 /* We return 1 for a numeric type that's known to be a constant
7109 value at compile-time or for an aggregate type that's a
7110 literal constant. */
7111 STRIP_NOPS (arg);
7112
7113 /* If we know this is a constant, return the constant one. */
7114 if (CONSTANT_CLASS_P (arg)
7115 || (TREE_CODE (arg) == CONSTRUCTOR
7116 && TREE_CONSTANT (arg)))
7117 return integer_one_node;
7118 if (TREE_CODE (arg) == ADDR_EXPR)
7119 {
7120 tree op = TREE_OPERAND (arg, 0);
7121 if (TREE_CODE (op) == STRING_CST
7122 || (TREE_CODE (op) == ARRAY_REF
7123 && integer_zerop (TREE_OPERAND (op, 1))
7124 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7125 return integer_one_node;
7126 }
7127
7128 /* If this expression has side effects, show we don't know it to be a
7129 constant. Likewise if it's a pointer or aggregate type since in
7130 those cases we only want literals; those are only optimized
7131 when generating RTL, not later.
7132 And finally, if we are compiling an initializer, not code, we
7133 need to return a definite result now; there's not going to be any
7134 more optimization done. */
7135 if (TREE_SIDE_EFFECTS (arg)
7136 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7137 || POINTER_TYPE_P (TREE_TYPE (arg))
7138 || cfun == 0
7139 || folding_initializer
7140 || force_folding_builtin_constant_p)
7141 return integer_zero_node;
7142
7143 return NULL_TREE;
7144 }
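
/* An illustrative sketch (not taken from the sources): at the source
   level the folding above behaves roughly as

     __builtin_constant_p (42)     -> 1   (a CONSTANT_CLASS_P node)
     __builtin_constant_p ("abc")  -> 1   (ADDR_EXPR of a STRING_CST)
     __builtin_constant_p (argc)   -> deferred (NULL_TREE), or 0 once no
                                      further optimization will run.  */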
7145
7146 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7147 return it as a truthvalue. */
7148
7149 static tree
7150 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7151 tree predictor)
7152 {
7153 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7154
7155 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7156 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7157 ret_type = TREE_TYPE (TREE_TYPE (fn));
7158 pred_type = TREE_VALUE (arg_types);
7159 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7160
7161 pred = fold_convert_loc (loc, pred_type, pred);
7162 expected = fold_convert_loc (loc, expected_type, expected);
7163 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7164 predictor);
7165
7166 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7167 build_int_cst (ret_type, 0));
7168 }
7169
7170 /* Fold a call to builtin_expect with arguments ARG0, ARG1 and ARG2.
7171 Return NULL_TREE if no simplification is possible. */
7172
7173 tree
7174 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
7175 {
7176 tree inner, fndecl, inner_arg0;
7177 enum tree_code code;
7178
7179 /* Distribute the expected value over short-circuiting operators.
7180 See through the cast from truthvalue_type_node to long. */
7181 inner_arg0 = arg0;
7182 while (CONVERT_EXPR_P (inner_arg0)
7183 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7184 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7185 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7186
7187 /* If this is a builtin_expect within a builtin_expect keep the
7188 inner one. See through a comparison against a constant. It
7189 might have been added to create a truthvalue. */
7190 inner = inner_arg0;
7191
7192 if (COMPARISON_CLASS_P (inner)
7193 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7194 inner = TREE_OPERAND (inner, 0);
7195
7196 if (TREE_CODE (inner) == CALL_EXPR
7197 && (fndecl = get_callee_fndecl (inner))
7198 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7199 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7200 return arg0;
7201
7202 inner = inner_arg0;
7203 code = TREE_CODE (inner);
7204 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7205 {
7206 tree op0 = TREE_OPERAND (inner, 0);
7207 tree op1 = TREE_OPERAND (inner, 1);
7208
7209 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7210 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
7211 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7212
7213 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7214 }
7215
7216 /* If the argument isn't invariant then there's nothing else we can do. */
7217 if (!TREE_CONSTANT (inner_arg0))
7218 return NULL_TREE;
7219
7220 /* If we expect that a comparison against the argument will fold to
7221 a constant, return the constant. In practice, this means a true
7222 constant or the address of a non-weak symbol. */
7223 inner = inner_arg0;
7224 STRIP_NOPS (inner);
7225 if (TREE_CODE (inner) == ADDR_EXPR)
7226 {
7227 do
7228 {
7229 inner = TREE_OPERAND (inner, 0);
7230 }
7231 while (TREE_CODE (inner) == COMPONENT_REF
7232 || TREE_CODE (inner) == ARRAY_REF);
7233 if ((TREE_CODE (inner) == VAR_DECL
7234 || TREE_CODE (inner) == FUNCTION_DECL)
7235 && DECL_WEAK (inner))
7236 return NULL_TREE;
7237 }
7238
7239 /* Otherwise, ARG0 already has the proper type for the return value. */
7240 return arg0;
7241 }
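
/* A sketch of the distribution above (illustration only):

     __builtin_expect (a && b, 1)

   is rewritten, via build_builtin_expect_predicate, into roughly

     (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)

   so that each arm of the short-circuit carries the expectation.  */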
7242
7243 /* Fold a call to __builtin_classify_type with argument ARG. */
7244
7245 static tree
7246 fold_builtin_classify_type (tree arg)
7247 {
7248 if (arg == 0)
7249 return build_int_cst (integer_type_node, no_type_class);
7250
7251 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7252 }
7253
7254 /* Fold a call to __builtin_strlen with argument ARG. */
7255
7256 static tree
7257 fold_builtin_strlen (location_t loc, tree type, tree arg)
7258 {
7259 if (!validate_arg (arg, POINTER_TYPE))
7260 return NULL_TREE;
7261 else
7262 {
7263 tree len = c_strlen (arg, 0);
7264
7265 if (len)
7266 return fold_convert_loc (loc, type, len);
7267
7268 return NULL_TREE;
7269 }
7270 }
7271
7272 /* If ARG is a foldable constant real, use FN to round it to an integer
7273 value and try to represent the result in integer type ITYPE. Return
7274 the value on success, otherwise return null. */
7275
7276 static tree
7277 do_real_to_int_conversion (tree itype, tree arg,
7278 void (*fn) (REAL_VALUE_TYPE *, machine_mode,
7279 const REAL_VALUE_TYPE *))
7280 {
7281 if (TREE_CODE (arg) != REAL_CST || TREE_OVERFLOW (arg))
7282 return NULL_TREE;
7283
7284 const REAL_VALUE_TYPE *value = TREE_REAL_CST_PTR (arg);
7285 if (!real_isfinite (value))
7286 return NULL_TREE;
7287
7288 tree ftype = TREE_TYPE (arg);
7289 REAL_VALUE_TYPE rounded;
7290 fn (&rounded, TYPE_MODE (ftype), value);
7291
7292 bool fail = false;
7293 wide_int ival = real_to_integer (&rounded, &fail, TYPE_PRECISION (itype));
7294 if (fail)
7295 return NULL_TREE;
7296
7297 return wide_int_to_tree (itype, ival);
7298 }
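
/* For example (an assumed caller, for illustration only): passing
   real_round as FN lets a call such as lround (2.5) fold to the
   integer constant 3, provided the rounded value fits in ITYPE.  */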
7299
7300
7301 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7302
7303 static tree
7304 fold_builtin_inf (location_t loc, tree type, int warn)
7305 {
7306 REAL_VALUE_TYPE real;
7307
7308 /* __builtin_inff is intended to be usable to define INFINITY on all
7309 targets. If an infinity is not available, INFINITY expands "to a
7310 positive constant of type float that overflows at translation
7311 time", footnote "In this case, using INFINITY will violate the
7312 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7313 Thus we pedwarn to ensure this constraint violation is
7314 diagnosed. */
7315 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7316 pedwarn (loc, 0, "target format does not support infinity");
7317
7318 real_inf (&real);
7319 return build_real (type, real);
7320 }
7321
7322 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7323
7324 static tree
7325 fold_builtin_nan (tree arg, tree type, int quiet)
7326 {
7327 REAL_VALUE_TYPE real;
7328 const char *str;
7329
7330 if (!validate_arg (arg, POINTER_TYPE))
7331 return NULL_TREE;
7332 str = c_getstr (arg);
7333 if (!str)
7334 return NULL_TREE;
7335
7336 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7337 return NULL_TREE;
7338
7339 return build_real (type, real);
7340 }
7341
7342 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7343 NULL_TREE if no simplification can be made. */
7344
7345 static tree
7346 fold_builtin_sincos (location_t loc,
7347 tree arg0, tree arg1, tree arg2)
7348 {
7349 tree type;
7350 tree res, fn, call;
7351
7352 if (!validate_arg (arg0, REAL_TYPE)
7353 || !validate_arg (arg1, POINTER_TYPE)
7354 || !validate_arg (arg2, POINTER_TYPE))
7355 return NULL_TREE;
7356
7357 type = TREE_TYPE (arg0);
7358
7359 /* Calculate the result when the argument is a constant. */
7360 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7361 return res;
7362
7363 /* Canonicalize sincos to cexpi. */
7364 if (!targetm.libc_has_function (function_c99_math_complex))
7365 return NULL_TREE;
7366 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7367 if (!fn)
7368 return NULL_TREE;
7369
7370 call = build_call_expr_loc (loc, fn, 1, arg0);
7371 call = builtin_save_expr (call);
7372
7373 return build2 (COMPOUND_EXPR, void_type_node,
7374 build2 (MODIFY_EXPR, void_type_node,
7375 build_fold_indirect_ref_loc (loc, arg1),
7376 build1 (IMAGPART_EXPR, type, call)),
7377 build2 (MODIFY_EXPR, void_type_node,
7378 build_fold_indirect_ref_loc (loc, arg2),
7379 build1 (REALPART_EXPR, type, call)));
7380 }
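
/* Sketch of the canonicalization above: when the C99 complex math
   library is available,

     sincos (x, &sinx, &cosx);

   becomes (roughly)

     tmp = cexpi (x);  sinx = __imag__ tmp;  cosx = __real__ tmp;

   since cexpi (x) computes cos (x) + i*sin (x) in one call.  */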
7381
7382 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
7383 and their long and long long variants (i.e. ffsl and ffsll). ARG is
7384 the argument to the call. Return NULL_TREE if no simplification can
7385 be made. */
7386
7387 static tree
7388 fold_builtin_bitop (tree fndecl, tree arg)
7389 {
7390 if (!validate_arg (arg, INTEGER_TYPE))
7391 return NULL_TREE;
7392
7393 /* Optimize for constant argument. */
7394 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7395 {
7396 tree type = TREE_TYPE (arg);
7397 int result;
7398
7399 switch (DECL_FUNCTION_CODE (fndecl))
7400 {
7401 CASE_INT_FN (BUILT_IN_FFS):
7402 result = wi::ffs (arg);
7403 break;
7404
7405 CASE_INT_FN (BUILT_IN_CLZ):
7406 if (wi::ne_p (arg, 0))
7407 result = wi::clz (arg);
7408 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7409 result = TYPE_PRECISION (type);
7410 break;
7411
7412 CASE_INT_FN (BUILT_IN_CTZ):
7413 if (wi::ne_p (arg, 0))
7414 result = wi::ctz (arg);
7415 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7416 result = TYPE_PRECISION (type);
7417 break;
7418
7419 CASE_INT_FN (BUILT_IN_CLRSB):
7420 result = wi::clrsb (arg);
7421 break;
7422
7423 CASE_INT_FN (BUILT_IN_POPCOUNT):
7424 result = wi::popcount (arg);
7425 break;
7426
7427 CASE_INT_FN (BUILT_IN_PARITY):
7428 result = wi::parity (arg);
7429 break;
7430
7431 default:
7432 gcc_unreachable ();
7433 }
7434
7435 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
7436 }
7437
7438 return NULL_TREE;
7439 }
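
/* Illustrative constant folds performed above (assuming 32-bit int):

     __builtin_ffs (0x10)      -> 5
     __builtin_popcount (0xff) -> 8
     __builtin_clz (1)         -> 31
     __builtin_parity (7)      -> 1

   At zero, clz/ctz fold to the target-defined value when there is one,
   or to the type precision otherwise.  */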
7440
7441 /* Fold function call to builtin_bswap and the short, long and long long
7442 variants. Return NULL_TREE if no simplification can be made. */
7443 static tree
7444 fold_builtin_bswap (tree fndecl, tree arg)
7445 {
7446 if (! validate_arg (arg, INTEGER_TYPE))
7447 return NULL_TREE;
7448
7449 /* Optimize constant value. */
7450 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7451 {
7452 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7453
7454 switch (DECL_FUNCTION_CODE (fndecl))
7455 {
7456 case BUILT_IN_BSWAP16:
7457 case BUILT_IN_BSWAP32:
7458 case BUILT_IN_BSWAP64:
7459 {
7460 signop sgn = TYPE_SIGN (type);
7461 tree result
7462 = wide_int_to_tree (type,
7463 wide_int::from (arg, TYPE_PRECISION (type),
7464 sgn).bswap ());
7465 return result;
7466 }
7467 default:
7468 gcc_unreachable ();
7469 }
7470 }
7471
7472 return NULL_TREE;
7473 }
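
/* e.g. __builtin_bswap16 (0x1234) folds to 0x3412 and
   __builtin_bswap32 (0x12345678) to 0x78563412.  */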
7474
7475 /* Fold a builtin function call to pow, powf, or powl. Return
7476 NULL_TREE if no simplification can be made. */
7477 static tree
7478 fold_const_builtin_pow (tree arg0, tree arg1, tree type)
7479 {
7480 tree res;
7481
7482 if (!validate_arg (arg0, REAL_TYPE)
7483 || !validate_arg (arg1, REAL_TYPE))
7484 return NULL_TREE;
7485
7486 /* Calculate the result when the argument is a constant. */
7487 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
7488 return res;
7489
7490 /* Check for an integer exponent. */
7491 if (TREE_CODE (arg0) == REAL_CST
7492 && !TREE_OVERFLOW (arg0)
7493 && TREE_CODE (arg1) == REAL_CST
7494 && !TREE_OVERFLOW (arg1))
7495 {
7496 REAL_VALUE_TYPE cint1;
7497 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (arg0);
7498 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (arg1);
7499 HOST_WIDE_INT n1 = real_to_integer (c1);
7500 real_from_integer (&cint1, VOIDmode, n1, SIGNED);
7501 /* Attempt to evaluate pow at compile-time, unless this should
7502 raise an exception. */
7503 if (real_identical (c1, &cint1)
7504 && (n1 > 0
7505 || (!flag_trapping_math && !flag_errno_math)
7506 || !real_equal (c0, &dconst0)))
7507 {
7508 REAL_VALUE_TYPE x;
7509 bool inexact = real_powi (&x, TYPE_MODE (type), c0, n1);
7510 if (flag_unsafe_math_optimizations || !inexact)
7511 return build_real (type, x);
7512 }
7513 }
7514
7515 return NULL_TREE;
7516 }
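
/* Sketch of the integer-exponent case above:

     pow (2.0, 10.0) -> 1024.0  (exact, so folded unconditionally)
     pow (0.0, -1.0) stays unfolded under -ftrapping-math or
                     -fmath-errno, since it may raise an exception or
                     set errno at run time.  */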
7517
7518 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
7519 arguments to the call, and TYPE is its return type.
7520 Return NULL_TREE if no simplification can be made. */
7521
7522 static tree
7523 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
7524 {
7525 if (!validate_arg (arg1, POINTER_TYPE)
7526 || !validate_arg (arg2, INTEGER_TYPE)
7527 || !validate_arg (len, INTEGER_TYPE))
7528 return NULL_TREE;
7529 else
7530 {
7531 const char *p1;
7532
7533 if (TREE_CODE (arg2) != INTEGER_CST
7534 || !tree_fits_uhwi_p (len))
7535 return NULL_TREE;
7536
7537 p1 = c_getstr (arg1);
7538 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
7539 {
7540 char c;
7541 const char *r;
7542 tree tem;
7543
7544 if (target_char_cast (arg2, &c))
7545 return NULL_TREE;
7546
7547 r = (const char *) memchr (p1, c, tree_to_uhwi (len));
7548
7549 if (r == NULL)
7550 return build_int_cst (TREE_TYPE (arg1), 0);
7551
7552 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
7553 return fold_convert_loc (loc, type, tem);
7554 }
7555 return NULL_TREE;
7556 }
7557 }
7558
7559 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
7560 Return NULL_TREE if no simplification can be made. */
7561
7562 static tree
7563 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
7564 {
7565 const char *p1, *p2;
7566
7567 if (!validate_arg (arg1, POINTER_TYPE)
7568 || !validate_arg (arg2, POINTER_TYPE)
7569 || !validate_arg (len, INTEGER_TYPE))
7570 return NULL_TREE;
7571
7572 /* If the LEN parameter is zero, return zero. */
7573 if (integer_zerop (len))
7574 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
7575 arg1, arg2);
7576
7577 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7578 if (operand_equal_p (arg1, arg2, 0))
7579 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
7580
7581 p1 = c_getstr (arg1);
7582 p2 = c_getstr (arg2);
7583
7584 /* If all arguments are constant, and the value of len is not greater
7585 than the lengths of arg1 and arg2, evaluate at compile-time. */
7586 if (tree_fits_uhwi_p (len) && p1 && p2
7587 && compare_tree_int (len, strlen (p1) + 1) <= 0
7588 && compare_tree_int (len, strlen (p2) + 1) <= 0)
7589 {
7590 const int r = memcmp (p1, p2, tree_to_uhwi (len));
7591
7592 if (r > 0)
7593 return integer_one_node;
7594 else if (r < 0)
7595 return integer_minus_one_node;
7596 else
7597 return integer_zero_node;
7598 }
7599
7600 /* If the len parameter is one, return an expression corresponding to
7601 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
7602 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
7603 {
7604 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7605 tree cst_uchar_ptr_node
7606 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7607
7608 tree ind1
7609 = fold_convert_loc (loc, integer_type_node,
7610 build1 (INDIRECT_REF, cst_uchar_node,
7611 fold_convert_loc (loc,
7612 cst_uchar_ptr_node,
7613 arg1)));
7614 tree ind2
7615 = fold_convert_loc (loc, integer_type_node,
7616 build1 (INDIRECT_REF, cst_uchar_node,
7617 fold_convert_loc (loc,
7618 cst_uchar_ptr_node,
7619 arg2)));
7620 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
7621 }
7622
7623 return NULL_TREE;
7624 }
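
/* Example folds (illustration only):

     memcmp (p, p, n)         -> 0
     memcmp ("abc", "abd", 3) -> -1  (evaluated at compile time)
     memcmp (p, q, 1)         -> *(const unsigned char *) p
                                 - *(const unsigned char *) q  */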
7625
7626 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
7627 Return NULL_TREE if no simplification can be made. */
7628
7629 static tree
7630 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
7631 {
7632 const char *p1, *p2;
7633
7634 if (!validate_arg (arg1, POINTER_TYPE)
7635 || !validate_arg (arg2, POINTER_TYPE))
7636 return NULL_TREE;
7637
7638 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7639 if (operand_equal_p (arg1, arg2, 0))
7640 return integer_zero_node;
7641
7642 p1 = c_getstr (arg1);
7643 p2 = c_getstr (arg2);
7644
7645 if (p1 && p2)
7646 {
7647 const int i = strcmp (p1, p2);
7648 if (i < 0)
7649 return integer_minus_one_node;
7650 else if (i > 0)
7651 return integer_one_node;
7652 else
7653 return integer_zero_node;
7654 }
7655
7656 /* If the second arg is "", return *(const unsigned char*)arg1. */
7657 if (p2 && *p2 == '\0')
7658 {
7659 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7660 tree cst_uchar_ptr_node
7661 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7662
7663 return fold_convert_loc (loc, integer_type_node,
7664 build1 (INDIRECT_REF, cst_uchar_node,
7665 fold_convert_loc (loc,
7666 cst_uchar_ptr_node,
7667 arg1)));
7668 }
7669
7670 /* If the first arg is "", return -*(const unsigned char*)arg2. */
7671 if (p1 && *p1 == '\0')
7672 {
7673 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7674 tree cst_uchar_ptr_node
7675 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7676
7677 tree temp
7678 = fold_convert_loc (loc, integer_type_node,
7679 build1 (INDIRECT_REF, cst_uchar_node,
7680 fold_convert_loc (loc,
7681 cst_uchar_ptr_node,
7682 arg2)));
7683 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
7684 }
7685
7686 return NULL_TREE;
7687 }
7688
7689 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
7690 Return NULL_TREE if no simplification can be made. */
7691
7692 static tree
7693 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
7694 {
7695 const char *p1, *p2;
7696
7697 if (!validate_arg (arg1, POINTER_TYPE)
7698 || !validate_arg (arg2, POINTER_TYPE)
7699 || !validate_arg (len, INTEGER_TYPE))
7700 return NULL_TREE;
7701
7702 /* If the LEN parameter is zero, return zero. */
7703 if (integer_zerop (len))
7704 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
7705 arg1, arg2);
7706
7707 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7708 if (operand_equal_p (arg1, arg2, 0))
7709 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
7710
7711 p1 = c_getstr (arg1);
7712 p2 = c_getstr (arg2);
7713
7714 if (tree_fits_uhwi_p (len) && p1 && p2)
7715 {
7716 const int i = strncmp (p1, p2, tree_to_uhwi (len));
7717 if (i > 0)
7718 return integer_one_node;
7719 else if (i < 0)
7720 return integer_minus_one_node;
7721 else
7722 return integer_zero_node;
7723 }
7724
7725 /* If the second arg is "", and the length is greater than zero,
7726 return *(const unsigned char*)arg1. */
7727 if (p2 && *p2 == '\0'
7728 && TREE_CODE (len) == INTEGER_CST
7729 && tree_int_cst_sgn (len) == 1)
7730 {
7731 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7732 tree cst_uchar_ptr_node
7733 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7734
7735 return fold_convert_loc (loc, integer_type_node,
7736 build1 (INDIRECT_REF, cst_uchar_node,
7737 fold_convert_loc (loc,
7738 cst_uchar_ptr_node,
7739 arg1)));
7740 }
7741
7742 /* If the first arg is "", and the length is greater than zero,
7743 return -*(const unsigned char*)arg2. */
7744 if (p1 && *p1 == '\0'
7745 && TREE_CODE (len) == INTEGER_CST
7746 && tree_int_cst_sgn (len) == 1)
7747 {
7748 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7749 tree cst_uchar_ptr_node
7750 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7751
7752 tree temp = fold_convert_loc (loc, integer_type_node,
7753 build1 (INDIRECT_REF, cst_uchar_node,
7754 fold_convert_loc (loc,
7755 cst_uchar_ptr_node,
7756 arg2)));
7757 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
7758 }
7759
7760 /* If the len parameter is one, return an expression corresponding to
7761 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
7762 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
7763 {
7764 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7765 tree cst_uchar_ptr_node
7766 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7767
7768 tree ind1 = fold_convert_loc (loc, integer_type_node,
7769 build1 (INDIRECT_REF, cst_uchar_node,
7770 fold_convert_loc (loc,
7771 cst_uchar_ptr_node,
7772 arg1)));
7773 tree ind2 = fold_convert_loc (loc, integer_type_node,
7774 build1 (INDIRECT_REF, cst_uchar_node,
7775 fold_convert_loc (loc,
7776 cst_uchar_ptr_node,
7777 arg2)));
7778 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
7779 }
7780
7781 return NULL_TREE;
7782 }
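
/* e.g. strncmp (s, t, 0) folds to 0, strncmp ("ab", "ac", 2) to -1
   at compile time, and strncmp (s, "", n) with constant n > 0 to
   *(const unsigned char *) s.  */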
7783
7784 /* Fold a call to builtin isascii with argument ARG. */
7785
7786 static tree
7787 fold_builtin_isascii (location_t loc, tree arg)
7788 {
7789 if (!validate_arg (arg, INTEGER_TYPE))
7790 return NULL_TREE;
7791 else
7792 {
7793 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
7794 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
7795 build_int_cst (integer_type_node,
7796 ~ (unsigned HOST_WIDE_INT) 0x7f));
7797 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
7798 arg, integer_zero_node);
7799 }
7800 }
7801
7802 /* Fold a call to builtin toascii with argument ARG. */
7803
7804 static tree
7805 fold_builtin_toascii (location_t loc, tree arg)
7806 {
7807 if (!validate_arg (arg, INTEGER_TYPE))
7808 return NULL_TREE;
7809
7810 /* Transform toascii(c) -> (c & 0x7f). */
7811 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
7812 build_int_cst (integer_type_node, 0x7f));
7813 }
7814
7815 /* Fold a call to builtin isdigit with argument ARG. */
7816
7817 static tree
7818 fold_builtin_isdigit (location_t loc, tree arg)
7819 {
7820 if (!validate_arg (arg, INTEGER_TYPE))
7821 return NULL_TREE;
7822 else
7823 {
7824 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
7825 /* According to the C standard, isdigit is unaffected by locale.
7826 However, it definitely is affected by the target character set. */
7827 unsigned HOST_WIDE_INT target_digit0
7828 = lang_hooks.to_target_charset ('0');
7829
7830 if (target_digit0 == 0)
7831 return NULL_TREE;
7832
7833 arg = fold_convert_loc (loc, unsigned_type_node, arg);
7834 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
7835 build_int_cst (unsigned_type_node, target_digit0));
7836 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
7837 build_int_cst (unsigned_type_node, 9));
7838 }
7839 }
7840
7841 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
7842
7843 static tree
7844 fold_builtin_fabs (location_t loc, tree arg, tree type)
7845 {
7846 if (!validate_arg (arg, REAL_TYPE))
7847 return NULL_TREE;
7848
7849 arg = fold_convert_loc (loc, type, arg);
7850 return fold_build1_loc (loc, ABS_EXPR, type, arg);
7851 }
7852
7853 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
7854
7855 static tree
7856 fold_builtin_abs (location_t loc, tree arg, tree type)
7857 {
7858 if (!validate_arg (arg, INTEGER_TYPE))
7859 return NULL_TREE;
7860
7861 arg = fold_convert_loc (loc, type, arg);
7862 return fold_build1_loc (loc, ABS_EXPR, type, arg);
7863 }
7864
7865 /* Fold a fma operation with arguments ARG[012]. */
7866
7867 tree
7868 fold_fma (location_t loc ATTRIBUTE_UNUSED,
7869 tree type, tree arg0, tree arg1, tree arg2)
7870 {
7871 if (TREE_CODE (arg0) == REAL_CST
7872 && TREE_CODE (arg1) == REAL_CST
7873 && TREE_CODE (arg2) == REAL_CST)
7874 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
7875
7876 return NULL_TREE;
7877 }
7878
7879 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
7880
7881 static tree
7882 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
7883 {
7884 /* ??? Only expand to FMA_EXPR if it's directly supported. */
7885 if (validate_arg (arg0, REAL_TYPE)
7886 && validate_arg (arg1, REAL_TYPE)
7887 && validate_arg (arg2, REAL_TYPE)
7888 && optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
7889 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
7890
7891 return NULL_TREE;
7892 }
7893
7894 /* Fold a call to builtin fmin or fmax. */
7895
7896 static tree
7897 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
7898 tree type, bool max)
7899 {
7900 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
7901 {
7902 /* Calculate the result when the argument is a constant. */
7903 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
7904
7905 if (res)
7906 return res;
7907
7908 /* If either argument is NaN, return the other one. Avoid the
7909 transformation if we get (and honor) a signalling NaN. Using
7910 omit_one_operand() ensures we create a non-lvalue. */
7911 if (TREE_CODE (arg0) == REAL_CST
7912 && real_isnan (&TREE_REAL_CST (arg0))
7913 && (! HONOR_SNANS (arg0)
7914 || ! TREE_REAL_CST (arg0).signalling))
7915 return omit_one_operand_loc (loc, type, arg1, arg0);
7916 if (TREE_CODE (arg1) == REAL_CST
7917 && real_isnan (&TREE_REAL_CST (arg1))
7918 && (! HONOR_SNANS (arg1)
7919 || ! TREE_REAL_CST (arg1).signalling))
7920 return omit_one_operand_loc (loc, type, arg0, arg1);
7921
7922 /* Transform fmin/fmax(x,x) -> x. */
7923 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
7924 return omit_one_operand_loc (loc, type, arg0, arg1);
7925
7926 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
7927 functions to return the numeric arg if the other one is NaN.
7928 These tree codes don't honor that, so only transform if
7929 -ffinite-math-only is set. C99 doesn't require -0.0 to be
7930 handled, so we don't have to worry about it either. */
7931 if (flag_finite_math_only)
7932 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
7933 fold_convert_loc (loc, type, arg0),
7934 fold_convert_loc (loc, type, arg1));
7935 }
7936 return NULL_TREE;
7937 }
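
/* e.g. fmax (x, __builtin_nan ("")) folds to x (a quiet NaN argument
   is dropped), and with -ffinite-math-only fmax (x, y) becomes
   MAX_EXPR <x, y>.  */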
7938
7939 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
7940
7941 static tree
7942 fold_builtin_carg (location_t loc, tree arg, tree type)
7943 {
7944 if (validate_arg (arg, COMPLEX_TYPE)
7945 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7946 {
7947 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
7948
7949 if (atan2_fn)
7950 {
7951 tree new_arg = builtin_save_expr (arg);
7952 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
7953 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
7954 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
7955 }
7956 }
7957
7958 return NULL_TREE;
7959 }
7960
7961 /* Fold a call to builtin logb/ilogb. */
7962
7963 static tree
7964 fold_builtin_logb (location_t loc, tree arg, tree rettype)
7965 {
7966 if (! validate_arg (arg, REAL_TYPE))
7967 return NULL_TREE;
7968
7969 STRIP_NOPS (arg);
7970
7971 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
7972 {
7973 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
7974
7975 switch (value->cl)
7976 {
7977 case rvc_nan:
7978 case rvc_inf:
7979 /* If arg is Inf or NaN and we're logb, return it. */
7980 if (TREE_CODE (rettype) == REAL_TYPE)
7981 {
7982 /* For logb(-Inf) we have to return +Inf. */
7983 if (real_isinf (value) && real_isneg (value))
7984 {
7985 REAL_VALUE_TYPE tem;
7986 real_inf (&tem);
7987 return build_real (rettype, tem);
7988 }
7989 return fold_convert_loc (loc, rettype, arg);
7990 }
7991 /* Fall through... */
7992 case rvc_zero:
7993 /* Zero may set errno and/or raise an exception for logb; also,
7994 for ilogb we don't know FP_ILOGB0. */
7995 return NULL_TREE;
7996 case rvc_normal:
7997 /* For normal numbers, proceed iff radix == 2. In GCC,
7998 normalized significands are in the range [0.5, 1.0). We
7999 want the exponent as if they were [1.0, 2.0) so get the
8000 exponent and subtract 1. */
8001 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
8002 return fold_convert_loc (loc, rettype,
8003 build_int_cst (integer_type_node,
8004 REAL_EXP (value)-1));
8005 break;
8006 }
8007 }
8008
8009 return NULL_TREE;
8010 }
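
/* e.g. for radix-2 formats logb (8.0) folds to 3.0 and ilogb (0.5)
   to -1, while logb (-Inf) folds to +Inf.  */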
8011
8012 /* Fold a call to builtin significand, if radix == 2. */
8013
8014 static tree
8015 fold_builtin_significand (location_t loc, tree arg, tree rettype)
8016 {
8017 if (! validate_arg (arg, REAL_TYPE))
8018 return NULL_TREE;
8019
8020 STRIP_NOPS (arg);
8021
8022 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
8023 {
8024 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
8025
8026 switch (value->cl)
8027 {
8028 case rvc_zero:
8029 case rvc_nan:
8030 case rvc_inf:
8031 /* If arg is +-0, +-Inf or +-NaN, then return it. */
8032 return fold_convert_loc (loc, rettype, arg);
8033 case rvc_normal:
8034 /* For normal numbers, proceed iff radix == 2. */
8035 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
8036 {
8037 REAL_VALUE_TYPE result = *value;
8038 /* In GCC, normalized significands are in the range [0.5,
8039 1.0). We want them to be [1.0, 2.0) so set the
8040 exponent to 1. */
8041 SET_REAL_EXP (&result, 1);
8042 return build_real (rettype, result);
8043 }
8044 break;
8045 }
8046 }
8047
8048 return NULL_TREE;
8049 }
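
/* e.g. significand (12.0) folds to 1.5, since 12.0 = 1.5 * 2**3.  */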
8050
8051 /* Fold a call to builtin frexp; we can assume the base is 2. */
8052
8053 static tree
8054 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
8055 {
8056 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8057 return NULL_TREE;
8058
8059 STRIP_NOPS (arg0);
8060
8061 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8062 return NULL_TREE;
8063
8064 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8065
8066 /* Proceed if a valid pointer type was passed in. */
8067 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
8068 {
8069 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8070 tree frac, exp;
8071
8072 switch (value->cl)
8073 {
8074 case rvc_zero:
8075 /* For +-0, return (*exp = 0, +-0). */
8076 exp = integer_zero_node;
8077 frac = arg0;
8078 break;
8079 case rvc_nan:
8080 case rvc_inf:
8081 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
8082 return omit_one_operand_loc (loc, rettype, arg0, arg1);
8083 case rvc_normal:
8084 {
8085 /* Since the frexp function always expects base 2, and in
8086 GCC normalized significands are already in the range
8087 [0.5, 1.0), we have exactly what frexp wants. */
8088 REAL_VALUE_TYPE frac_rvt = *value;
8089 SET_REAL_EXP (&frac_rvt, 0);
8090 frac = build_real (rettype, frac_rvt);
8091 exp = build_int_cst (integer_type_node, REAL_EXP (value));
8092 }
8093 break;
8094 default:
8095 gcc_unreachable ();
8096 }
8097
8098 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
8099 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
8100 TREE_SIDE_EFFECTS (arg1) = 1;
8101 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
8102 }
8103
8104 return NULL_TREE;
8105 }
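
/* e.g. frexp (8.0, &e) folds to (*e = 4, 0.5), since 8.0
   = 0.5 * 2**4 and GCC's normalized significand is already in
   frexp's [0.5, 1) range.  */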
8106
8107 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
8108 then we can assume the base is two. If it's false, then we have to
8109 check the mode of the TYPE parameter in certain cases. */
8110
8111 static tree
8112 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
8113 tree type, bool ldexp)
8114 {
8115 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
8116 {
8117 STRIP_NOPS (arg0);
8118 STRIP_NOPS (arg1);
8119
8120 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
8121 if (real_zerop (arg0) || integer_zerop (arg1)
8122 || (TREE_CODE (arg0) == REAL_CST
8123 && !real_isfinite (&TREE_REAL_CST (arg0))))
8124 return omit_one_operand_loc (loc, type, arg0, arg1);
8125
8126 /* If both arguments are constant, then try to evaluate it. */
8127 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
8128 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
8129 && tree_fits_shwi_p (arg1))
8130 {
8131 /* Bound the maximum adjustment to twice the range of the
8132 mode's valid exponents. Use abs to ensure the range is
8133 positive as a sanity check. */
8134 const long max_exp_adj = 2 *
8135 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
8136 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
8137
8138 /* Get the user-requested adjustment. */
8139 const HOST_WIDE_INT req_exp_adj = tree_to_shwi (arg1);
8140
8141 /* The requested adjustment must be inside this range. This
8142 is a preliminary cap to avoid things like overflow; we
8143 may still fail to compute the result for other reasons. */
8144 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
8145 {
8146 REAL_VALUE_TYPE initial_result;
8147
8148 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
8149
8150 /* Ensure we didn't overflow. */
8151 if (! real_isinf (&initial_result))
8152 {
8153 const REAL_VALUE_TYPE trunc_result
8154 = real_value_truncate (TYPE_MODE (type), initial_result);
8155
8156 /* Only proceed if the target mode can hold the
8157 resulting value. */
8158 if (real_equal (&initial_result, &trunc_result))
8159 return build_real (type, trunc_result);
8160 }
8161 }
8162 }
8163 }
8164
8165 return NULL_TREE;
8166 }
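
/* e.g. ldexp (1.0, 10) folds to 1024.0, while ldexp (1.0, 100000)
   stays unfolded because the requested adjustment exceeds twice the
   mode's exponent range.  */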
8167
8168 /* Fold a call to builtin modf. */
8169
8170 static tree
8171 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
8172 {
8173 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8174 return NULL_TREE;
8175
8176 STRIP_NOPS (arg0);
8177
8178 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8179 return NULL_TREE;
8180
8181 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8182
8183 /* Proceed if a valid pointer type was passed in. */
8184 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
8185 {
8186 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8187 REAL_VALUE_TYPE trunc, frac;
8188
8189 switch (value->cl)
8190 {
8191 case rvc_nan:
8192 case rvc_zero:
8193 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
8194 trunc = frac = *value;
8195 break;
8196 case rvc_inf:
8197 /* For +-Inf, return (*arg1 = arg0, +-0). */
8198 frac = dconst0;
8199 frac.sign = value->sign;
8200 trunc = *value;
8201 break;
8202 case rvc_normal:
8203 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
8204 real_trunc (&trunc, VOIDmode, value);
8205 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
8206 /* If the original number was negative and already
8207 integral, then the fractional part is -0.0. */
8208 if (value->sign && frac.cl == rvc_zero)
8209 frac.sign = value->sign;
8210 break;
8211 }
8212
8213 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
8214 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
8215 build_real (rettype, trunc));
8216 TREE_SIDE_EFFECTS (arg1) = 1;
8217 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
8218 build_real (rettype, frac));
8219 }
8220
8221 return NULL_TREE;
8222 }
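
/* e.g. modf (3.5, &i) folds to (*i = 3.0, 0.5), and
   modf (-2.0, &i) to (*i = -2.0, -0.0), preserving the sign of the
   zero fractional part.  */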
8223
8224 /* Given a location LOC, an interclass builtin function decl FNDECL
8225 and its single argument ARG, return a folded expression computing
8226 the same, or NULL_TREE if we either couldn't or didn't want to fold
8227 (the latter happens if there's an RTL instruction available). */
8228
8229 static tree
8230 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
8231 {
8232 machine_mode mode;
8233
8234 if (!validate_arg (arg, REAL_TYPE))
8235 return NULL_TREE;
8236
8237 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
8238 return NULL_TREE;
8239
8240 mode = TYPE_MODE (TREE_TYPE (arg));
8241
8242 /* If there is no optab, try generic code. */
8243 switch (DECL_FUNCTION_CODE (fndecl))
8244 {
8245 tree result;
8246
8247 CASE_FLT_FN (BUILT_IN_ISINF):
8248 {
8249 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
8250 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8251 tree const type = TREE_TYPE (arg);
8252 REAL_VALUE_TYPE r;
8253 char buf[128];
8254
8255 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8256 real_from_string (&r, buf);
8257 result = build_call_expr (isgr_fn, 2,
8258 fold_build1_loc (loc, ABS_EXPR, type, arg),
8259 build_real (type, r));
8260 return result;
8261 }
8262 CASE_FLT_FN (BUILT_IN_FINITE):
8263 case BUILT_IN_ISFINITE:
8264 {
8265 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
8266 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8267 tree const type = TREE_TYPE (arg);
8268 REAL_VALUE_TYPE r;
8269 char buf[128];
8270
8271 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8272 real_from_string (&r, buf);
8273 result = build_call_expr (isle_fn, 2,
8274 fold_build1_loc (loc, ABS_EXPR, type, arg),
8275 build_real (type, r));
8276 /*result = fold_build2_loc (loc, UNGT_EXPR,
8277 TREE_TYPE (TREE_TYPE (fndecl)),
8278 fold_build1_loc (loc, ABS_EXPR, type, arg),
8279 build_real (type, r));
8280 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
8281 TREE_TYPE (TREE_TYPE (fndecl)),
8282 result);*/
8283 return result;
8284 }
8285 case BUILT_IN_ISNORMAL:
8286 {
8287 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
8288 islessequal(fabs(x),DBL_MAX). */
8289 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8290 tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
8291 tree const type = TREE_TYPE (arg);
8292 REAL_VALUE_TYPE rmax, rmin;
8293 char buf[128];
8294
8295 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8296 real_from_string (&rmax, buf);
8297 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
8298 real_from_string (&rmin, buf);
8299 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
8300 result = build_call_expr (isle_fn, 2, arg,
8301 build_real (type, rmax));
8302 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
8303 build_call_expr (isge_fn, 2, arg,
8304 build_real (type, rmin)));
8305 return result;
8306 }
8307 default:
8308 break;
8309 }
8310
8311 return NULL_TREE;
8312 }
8313
8314 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
8315 ARG is the argument for the call. */
8316
8317 static tree
8318 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
8319 {
8320 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8321 REAL_VALUE_TYPE r;
8322
8323 if (!validate_arg (arg, REAL_TYPE))
8324 return NULL_TREE;
8325
8326 switch (builtin_index)
8327 {
8328 case BUILT_IN_ISINF:
8329 if (!HONOR_INFINITIES (arg))
8330 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8331
8332 if (TREE_CODE (arg) == REAL_CST)
8333 {
8334 r = TREE_REAL_CST (arg);
8335 if (real_isinf (&r))
8336 return real_compare (GT_EXPR, &r, &dconst0)
8337 ? integer_one_node : integer_minus_one_node;
8338 else
8339 return integer_zero_node;
8340 }
8341
8342 return NULL_TREE;
8343
8344 case BUILT_IN_ISINF_SIGN:
8345 {
8346 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
8347 /* In a boolean context, GCC will fold the inner COND_EXPR to
8348 1. So e.g. "if (isinf_sign(x))" would be folded to just
8349 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
8350 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
8351 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
8352 tree tmp = NULL_TREE;
8353
8354 arg = builtin_save_expr (arg);
8355
8356 if (signbit_fn && isinf_fn)
8357 {
8358 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
8359 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
8360
8361 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8362 signbit_call, integer_zero_node);
8363 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8364 isinf_call, integer_zero_node);
8365
8366 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
8367 integer_minus_one_node, integer_one_node);
8368 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8369 isinf_call, tmp,
8370 integer_zero_node);
8371 }
8372
8373 return tmp;
8374 }
8375
8376 case BUILT_IN_ISFINITE:
8377 if (!HONOR_NANS (arg)
8378 && !HONOR_INFINITIES (arg))
8379 return omit_one_operand_loc (loc, type, integer_one_node, arg);
8380
8381 if (TREE_CODE (arg) == REAL_CST)
8382 {
8383 r = TREE_REAL_CST (arg);
8384 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
8385 }
8386
8387 return NULL_TREE;
8388
8389 case BUILT_IN_ISNAN:
8390 if (!HONOR_NANS (arg))
8391 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8392
8393 if (TREE_CODE (arg) == REAL_CST)
8394 {
8395 r = TREE_REAL_CST (arg);
8396 return real_isnan (&r) ? integer_one_node : integer_zero_node;
8397 }
8398
8399 arg = builtin_save_expr (arg);
8400 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
8401
8402 default:
8403 gcc_unreachable ();
8404 }
8405 }
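
/* e.g. __builtin_isnan (__builtin_nan ("")) folds to 1,
   __builtin_isinf (-__builtin_inf ()) to -1, and with NaNs not
   honored (-ffinite-math-only) __builtin_isnan (x) folds to 0.  */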
8406
8407 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
8408 This builtin will generate code to return the appropriate floating
8409 point classification depending on the value of the floating point
8410 number passed in. The possible return values must be supplied as
8411 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
8412 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
8413 one floating point argument, which is "type generic". */
8414
8415 static tree
8416 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
8417 {
8418 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
8419 arg, type, res, tmp;
8420 machine_mode mode;
8421 REAL_VALUE_TYPE r;
8422 char buf[128];
8423
8424 /* Verify the required arguments in the original call. */
8425 if (nargs != 6
8426 || !validate_arg (args[0], INTEGER_TYPE)
8427 || !validate_arg (args[1], INTEGER_TYPE)
8428 || !validate_arg (args[2], INTEGER_TYPE)
8429 || !validate_arg (args[3], INTEGER_TYPE)
8430 || !validate_arg (args[4], INTEGER_TYPE)
8431 || !validate_arg (args[5], REAL_TYPE))
8432 return NULL_TREE;
8433
8434 fp_nan = args[0];
8435 fp_infinite = args[1];
8436 fp_normal = args[2];
8437 fp_subnormal = args[3];
8438 fp_zero = args[4];
8439 arg = args[5];
8440 type = TREE_TYPE (arg);
8441 mode = TYPE_MODE (type);
8442 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
8443
8444 /* fpclassify(x) ->
8445 isnan(x) ? FP_NAN :
8446 (fabs(x) == Inf ? FP_INFINITE :
8447 (fabs(x) >= DBL_MIN ? FP_NORMAL :
8448 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
8449
8450 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8451 build_real (type, dconst0));
8452 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8453 tmp, fp_zero, fp_subnormal);
8454
8455 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
8456 real_from_string (&r, buf);
8457 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
8458 arg, build_real (type, r));
8459 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
8460
8461 if (HONOR_INFINITIES (mode))
8462 {
8463 real_inf (&r);
8464 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8465 build_real (type, r));
8466 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
8467 fp_infinite, res);
8468 }
8469
8470 if (HONOR_NANS (mode))
8471 {
8472 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
8473 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
8474 }
8475
8476 return res;
8477 }
8478
8479 /* Fold a call to an unordered comparison function such as
8480 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
8481 being called and ARG0 and ARG1 are the arguments for the call.
8482 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
8483 the opposite of the desired result. UNORDERED_CODE is used
8484 for modes that can hold NaNs and ORDERED_CODE is used for
8485 the rest. */
8486
8487 static tree
8488 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
8489 enum tree_code unordered_code,
8490 enum tree_code ordered_code)
8491 {
8492 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8493 enum tree_code code;
8494 tree type0, type1;
8495 enum tree_code code0, code1;
8496 tree cmp_type = NULL_TREE;
8497
8498 type0 = TREE_TYPE (arg0);
8499 type1 = TREE_TYPE (arg1);
8500
8501 code0 = TREE_CODE (type0);
8502 code1 = TREE_CODE (type1);
8503
8504 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
8505 /* Choose the wider of two real types. */
8506 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
8507 ? type0 : type1;
8508 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
8509 cmp_type = type0;
8510 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
8511 cmp_type = type1;
8512
8513 arg0 = fold_convert_loc (loc, cmp_type, arg0);
8514 arg1 = fold_convert_loc (loc, cmp_type, arg1);
8515
8516 if (unordered_code == UNORDERED_EXPR)
8517 {
8518 if (!HONOR_NANS (arg0))
8519 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
8520 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
8521 }
8522
8523 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
8524 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
8525 fold_build2_loc (loc, code, type, arg0, arg1));
8526 }
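
/* e.g. __builtin_isgreater (x, y) is folded to !(x <= y) using
   UNLE_EXPR when NaNs are honored: the unordered comparison is true
   for NaN operands, so its negation is false, matching isgreater's
   requirement of a quiet false result on NaNs.  */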
8527
8528 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
8529 arithmetic if it can never overflow, or into internal functions that
8530 return both the result of the arithmetic and an overflow flag in
8531 a complex integer result, or into some other check for overflow. */
8532
8533 static tree
8534 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
8535 tree arg0, tree arg1, tree arg2)
8536 {
8537 enum internal_fn ifn = IFN_LAST;
8538 tree type = TREE_TYPE (TREE_TYPE (arg2));
8539 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
8540 switch (fcode)
8541 {
8542 case BUILT_IN_ADD_OVERFLOW:
8543 case BUILT_IN_SADD_OVERFLOW:
8544 case BUILT_IN_SADDL_OVERFLOW:
8545 case BUILT_IN_SADDLL_OVERFLOW:
8546 case BUILT_IN_UADD_OVERFLOW:
8547 case BUILT_IN_UADDL_OVERFLOW:
8548 case BUILT_IN_UADDLL_OVERFLOW:
8549 ifn = IFN_ADD_OVERFLOW;
8550 break;
8551 case BUILT_IN_SUB_OVERFLOW:
8552 case BUILT_IN_SSUB_OVERFLOW:
8553 case BUILT_IN_SSUBL_OVERFLOW:
8554 case BUILT_IN_SSUBLL_OVERFLOW:
8555 case BUILT_IN_USUB_OVERFLOW:
8556 case BUILT_IN_USUBL_OVERFLOW:
8557 case BUILT_IN_USUBLL_OVERFLOW:
8558 ifn = IFN_SUB_OVERFLOW;
8559 break;
8560 case BUILT_IN_MUL_OVERFLOW:
8561 case BUILT_IN_SMUL_OVERFLOW:
8562 case BUILT_IN_SMULL_OVERFLOW:
8563 case BUILT_IN_SMULLL_OVERFLOW:
8564 case BUILT_IN_UMUL_OVERFLOW:
8565 case BUILT_IN_UMULL_OVERFLOW:
8566 case BUILT_IN_UMULLL_OVERFLOW:
8567 ifn = IFN_MUL_OVERFLOW;
8568 break;
8569 default:
8570 gcc_unreachable ();
8571 }
8572 tree ctype = build_complex_type (type);
8573 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
8574 2, arg0, arg1);
8575 tree tgt = save_expr (call);
8576 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
8577 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
8578 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
8579 tree store
8580 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
8581 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
8582 }
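/* Editor's illustration (a hedged sketch, not original code): at the
   source level the folding above corresponds to rewriting

     bool ovf = __builtin_add_overflow (a, b, &res);

   into roughly

     _Complex int tmp = .ADD_OVERFLOW (a, b);   // IFN_ADD_OVERFLOW
     res = __real__ tmp;                        // REALPART_EXPR
     ovf = (bool) __imag__ tmp;                 // IMAGPART_EXPR

   with the store to *arg2 and the overflow flag chained through a
   COMPOUND_EXPR, exactly as built at the end of the function.  */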
8583
8584 /* Fold a call to built-in function FNDECL with 0 arguments.
8585 This function returns NULL_TREE if no simplification was possible. */
8586
8587 static tree
8588 fold_builtin_0 (location_t loc, tree fndecl)
8589 {
8590 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8591 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8592 switch (fcode)
8593 {
8594 CASE_FLT_FN (BUILT_IN_INF):
8595 case BUILT_IN_INFD32:
8596 case BUILT_IN_INFD64:
8597 case BUILT_IN_INFD128:
8598 return fold_builtin_inf (loc, type, true);
8599
8600 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
8601 return fold_builtin_inf (loc, type, false);
8602
8603 case BUILT_IN_CLASSIFY_TYPE:
8604 return fold_builtin_classify_type (NULL_TREE);
8605
8606 default:
8607 break;
8608 }
8609 return NULL_TREE;
8610 }
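/* Editor's note: for example, under the above

     double d = __builtin_inf ();

   folds directly to a REAL_CST of +Inf in the call's return type; the
   boolean argument to fold_builtin_inf distinguishes __builtin_inf,
   which may diagnose targets whose format cannot represent infinity,
   from __builtin_huge_val, which never does.  */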
8611
8612 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
8613 This function returns NULL_TREE if no simplification was possible. */
8614
8615 static tree
8616 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
8617 {
8618 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8619 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8620 switch (fcode)
8621 {
8622 case BUILT_IN_CONSTANT_P:
8623 {
8624 tree val = fold_builtin_constant_p (arg0);
8625
8626 /* Gimplification will pull the CALL_EXPR for the builtin out of
8627 an if condition. When not optimizing, we'll not CSE it back.
8628 To avoid regressions such as link errors, return false now. */
8629 if (!val && !optimize)
8630 val = integer_zero_node;
8631
8632 return val;
8633 }
8634
8635 case BUILT_IN_CLASSIFY_TYPE:
8636 return fold_builtin_classify_type (arg0);
8637
8638 case BUILT_IN_STRLEN:
8639 return fold_builtin_strlen (loc, type, arg0);
8640
8641 CASE_FLT_FN (BUILT_IN_FABS):
8642 case BUILT_IN_FABSD32:
8643 case BUILT_IN_FABSD64:
8644 case BUILT_IN_FABSD128:
8645 return fold_builtin_fabs (loc, arg0, type);
8646
8647 case BUILT_IN_ABS:
8648 case BUILT_IN_LABS:
8649 case BUILT_IN_LLABS:
8650 case BUILT_IN_IMAXABS:
8651 return fold_builtin_abs (loc, arg0, type);
8652
8653 CASE_FLT_FN (BUILT_IN_CONJ):
8654 if (validate_arg (arg0, COMPLEX_TYPE)
8655 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8656 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
8657 break;
8658
8659 CASE_FLT_FN (BUILT_IN_CREAL):
8660 if (validate_arg (arg0, COMPLEX_TYPE)
8661 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8662 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
8663 break;
8664
8665 CASE_FLT_FN (BUILT_IN_CIMAG):
8666 if (validate_arg (arg0, COMPLEX_TYPE)
8667 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8668 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
8669 break;
8670
8671 CASE_FLT_FN (BUILT_IN_CCOS):
8672 if (validate_arg (arg0, COMPLEX_TYPE)
8673 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8674 return do_mpc_arg1 (arg0, type, mpc_cos);
8675 break;
8676
8677 CASE_FLT_FN (BUILT_IN_CCOSH):
8678 if (validate_arg (arg0, COMPLEX_TYPE)
8679 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8680 return do_mpc_arg1 (arg0, type, mpc_cosh);
8681 break;
8682
8683 CASE_FLT_FN (BUILT_IN_CPROJ):
8684 if (TREE_CODE (arg0) == COMPLEX_CST
8685 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8686 {
8687 const REAL_VALUE_TYPE *real
8688 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
8689 const REAL_VALUE_TYPE *imag
8690 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
8691
8692 if (real_isinf (real) || real_isinf (imag))
8693 return build_complex_inf (type, imag->sign);
8694 else
8695 return arg0;
8696 }
8697 break;
8698
8699 CASE_FLT_FN (BUILT_IN_CSIN):
8700 if (validate_arg (arg0, COMPLEX_TYPE)
8701 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8702 return do_mpc_arg1 (arg0, type, mpc_sin);
8703 break;
8704
8705 CASE_FLT_FN (BUILT_IN_CSINH):
8706 if (validate_arg (arg0, COMPLEX_TYPE)
8707 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8708 return do_mpc_arg1 (arg0, type, mpc_sinh);
8709 break;
8710
8711 CASE_FLT_FN (BUILT_IN_CTAN):
8712 if (validate_arg (arg0, COMPLEX_TYPE)
8713 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8714 return do_mpc_arg1 (arg0, type, mpc_tan);
8715 break;
8716
8717 CASE_FLT_FN (BUILT_IN_CTANH):
8718 if (validate_arg (arg0, COMPLEX_TYPE)
8719 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8720 return do_mpc_arg1 (arg0, type, mpc_tanh);
8721 break;
8722
8723 CASE_FLT_FN (BUILT_IN_CLOG):
8724 if (validate_arg (arg0, COMPLEX_TYPE)
8725 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8726 return do_mpc_arg1 (arg0, type, mpc_log);
8727 break;
8728
8729 CASE_FLT_FN (BUILT_IN_CSQRT):
8730 if (validate_arg (arg0, COMPLEX_TYPE)
8731 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8732 return do_mpc_arg1 (arg0, type, mpc_sqrt);
8733 break;
8734
8735 CASE_FLT_FN (BUILT_IN_CASIN):
8736 if (validate_arg (arg0, COMPLEX_TYPE)
8737 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8738 return do_mpc_arg1 (arg0, type, mpc_asin);
8739 break;
8740
8741 CASE_FLT_FN (BUILT_IN_CACOS):
8742 if (validate_arg (arg0, COMPLEX_TYPE)
8743 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8744 return do_mpc_arg1 (arg0, type, mpc_acos);
8745 break;
8746
8747 CASE_FLT_FN (BUILT_IN_CATAN):
8748 if (validate_arg (arg0, COMPLEX_TYPE)
8749 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8750 return do_mpc_arg1 (arg0, type, mpc_atan);
8751 break;
8752
8753 CASE_FLT_FN (BUILT_IN_CASINH):
8754 if (validate_arg (arg0, COMPLEX_TYPE)
8755 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8756 return do_mpc_arg1 (arg0, type, mpc_asinh);
8757 break;
8758
8759 CASE_FLT_FN (BUILT_IN_CACOSH):
8760 if (validate_arg (arg0, COMPLEX_TYPE)
8761 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8762 return do_mpc_arg1 (arg0, type, mpc_acosh);
8763 break;
8764
8765 CASE_FLT_FN (BUILT_IN_CATANH):
8766 if (validate_arg (arg0, COMPLEX_TYPE)
8767 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8768 return do_mpc_arg1 (arg0, type, mpc_atanh);
8769 break;
8770
8771 CASE_FLT_FN (BUILT_IN_CABS):
8772 if (TREE_CODE (arg0) == COMPLEX_CST
8773 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8774 return do_mpfr_arg2 (TREE_REALPART (arg0), TREE_IMAGPART (arg0),
8775 type, mpfr_hypot);
8776 break;
8777
8778 CASE_FLT_FN (BUILT_IN_CARG):
8779 return fold_builtin_carg (loc, arg0, type);
8780
8781 CASE_FLT_FN (BUILT_IN_SQRT):
8782 if (validate_arg (arg0, REAL_TYPE))
8783 return do_mpfr_arg1 (arg0, type, mpfr_sqrt, &dconst0, NULL, true);
8784 break;
8785
8786 CASE_FLT_FN (BUILT_IN_CBRT):
8787 if (validate_arg (arg0, REAL_TYPE))
8788 return do_mpfr_arg1 (arg0, type, mpfr_cbrt, NULL, NULL, 0);
8789 break;
8790
8791 CASE_FLT_FN (BUILT_IN_ASIN):
8792 if (validate_arg (arg0, REAL_TYPE))
8793 return do_mpfr_arg1 (arg0, type, mpfr_asin,
8794 &dconstm1, &dconst1, true);
8795 break;
8796
8797 CASE_FLT_FN (BUILT_IN_ACOS):
8798 if (validate_arg (arg0, REAL_TYPE))
8799 return do_mpfr_arg1 (arg0, type, mpfr_acos,
8800 &dconstm1, &dconst1, true);
8801 break;
8802
8803 CASE_FLT_FN (BUILT_IN_ATAN):
8804 if (validate_arg (arg0, REAL_TYPE))
8805 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
8806 break;
8807
8808 CASE_FLT_FN (BUILT_IN_ASINH):
8809 if (validate_arg (arg0, REAL_TYPE))
8810 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
8811 break;
8812
8813 CASE_FLT_FN (BUILT_IN_ACOSH):
8814 if (validate_arg (arg0, REAL_TYPE))
8815 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
8816 &dconst1, NULL, true);
8817 break;
8818
8819 CASE_FLT_FN (BUILT_IN_ATANH):
8820 if (validate_arg (arg0, REAL_TYPE))
8821 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
8822 &dconstm1, &dconst1, false);
8823 break;
8824
8825 CASE_FLT_FN (BUILT_IN_SIN):
8826 if (validate_arg (arg0, REAL_TYPE))
8827 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
8828 break;
8829
8830 CASE_FLT_FN (BUILT_IN_COS):
8831 if (validate_arg (arg0, REAL_TYPE))
8832 return do_mpfr_arg1 (arg0, type, mpfr_cos, NULL, NULL, 0);
8833 break;
8834
8835 CASE_FLT_FN (BUILT_IN_TAN):
8836 if (validate_arg (arg0, REAL_TYPE))
8837 return do_mpfr_arg1 (arg0, type, mpfr_tan, NULL, NULL, 0);
8838 break;
8839
8840 CASE_FLT_FN (BUILT_IN_CEXP):
8841 if (validate_arg (arg0, COMPLEX_TYPE)
8842 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8843 return do_mpc_arg1 (arg0, type, mpc_exp);
8844 break;
8845
8846 CASE_FLT_FN (BUILT_IN_CEXPI):
8847 if (validate_arg (arg0, REAL_TYPE))
8848 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
8849 break;
8850
8851 CASE_FLT_FN (BUILT_IN_SINH):
8852 if (validate_arg (arg0, REAL_TYPE))
8853 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
8854 break;
8855
8856 CASE_FLT_FN (BUILT_IN_COSH):
8857 if (validate_arg (arg0, REAL_TYPE))
8858 return do_mpfr_arg1 (arg0, type, mpfr_cosh, NULL, NULL, 0);
8859 break;
8860
8861 CASE_FLT_FN (BUILT_IN_TANH):
8862 if (validate_arg (arg0, REAL_TYPE))
8863 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
8864 break;
8865
8866 CASE_FLT_FN (BUILT_IN_ERF):
8867 if (validate_arg (arg0, REAL_TYPE))
8868 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
8869 break;
8870
8871 CASE_FLT_FN (BUILT_IN_ERFC):
8872 if (validate_arg (arg0, REAL_TYPE))
8873 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
8874 break;
8875
8876 CASE_FLT_FN (BUILT_IN_TGAMMA):
8877 if (validate_arg (arg0, REAL_TYPE))
8878 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
8879 break;
8880
8881 CASE_FLT_FN (BUILT_IN_EXP):
8882 if (validate_arg (arg0, REAL_TYPE))
8883 return do_mpfr_arg1 (arg0, type, mpfr_exp, NULL, NULL, 0);
8884 break;
8885
8886 CASE_FLT_FN (BUILT_IN_EXP2):
8887 if (validate_arg (arg0, REAL_TYPE))
8888 return do_mpfr_arg1 (arg0, type, mpfr_exp2, NULL, NULL, 0);
8889 break;
8890
8891 CASE_FLT_FN (BUILT_IN_EXP10):
8892 CASE_FLT_FN (BUILT_IN_POW10):
8893 if (validate_arg (arg0, REAL_TYPE))
8894 return do_mpfr_arg1 (arg0, type, mpfr_exp10, NULL, NULL, 0);
8895 break;
8896
8897 CASE_FLT_FN (BUILT_IN_EXPM1):
8898 if (validate_arg (arg0, REAL_TYPE))
8899 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
8900 break;
8901
8902 CASE_FLT_FN (BUILT_IN_LOG):
8903 if (validate_arg (arg0, REAL_TYPE))
8904 return do_mpfr_arg1 (arg0, type, mpfr_log, &dconst0, NULL, false);
8905 break;
8906
8907 CASE_FLT_FN (BUILT_IN_LOG2):
8908 if (validate_arg (arg0, REAL_TYPE))
8909 return do_mpfr_arg1 (arg0, type, mpfr_log2, &dconst0, NULL, false);
8910 break;
8911
8912 CASE_FLT_FN (BUILT_IN_LOG10):
8913 if (validate_arg (arg0, REAL_TYPE))
8914 return do_mpfr_arg1 (arg0, type, mpfr_log10, &dconst0, NULL, false);
8915 break;
8916
8917 CASE_FLT_FN (BUILT_IN_LOG1P):
8918 if (validate_arg (arg0, REAL_TYPE))
8919 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
8920 &dconstm1, NULL, false);
8921 break;
8922
8923 CASE_FLT_FN (BUILT_IN_J0):
8924 if (validate_arg (arg0, REAL_TYPE))
8925 return do_mpfr_arg1 (arg0, type, mpfr_j0,
8926 NULL, NULL, 0);
8927 break;
8928
8929 CASE_FLT_FN (BUILT_IN_J1):
8930 if (validate_arg (arg0, REAL_TYPE))
8931 return do_mpfr_arg1 (arg0, type, mpfr_j1,
8932 NULL, NULL, 0);
8933 break;
8934
8935 CASE_FLT_FN (BUILT_IN_Y0):
8936 if (validate_arg (arg0, REAL_TYPE))
8937 return do_mpfr_arg1 (arg0, type, mpfr_y0,
8938 &dconst0, NULL, false);
8939 break;
8940
8941 CASE_FLT_FN (BUILT_IN_Y1):
8942 if (validate_arg (arg0, REAL_TYPE))
8943 return do_mpfr_arg1 (arg0, type, mpfr_y1,
8944 &dconst0, NULL, false);
8945 break;
8946
8947 CASE_FLT_FN (BUILT_IN_NAN):
8948 case BUILT_IN_NAND32:
8949 case BUILT_IN_NAND64:
8950 case BUILT_IN_NAND128:
8951 return fold_builtin_nan (arg0, type, true);
8952
8953 CASE_FLT_FN (BUILT_IN_NANS):
8954 return fold_builtin_nan (arg0, type, false);
8955
8956 CASE_FLT_FN (BUILT_IN_FLOOR):
8957 if (TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0))
8958 {
8959 REAL_VALUE_TYPE x = TREE_REAL_CST (arg0);
8960 if (!REAL_VALUE_ISNAN (x) || !flag_errno_math)
8961 {
8962 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8963 REAL_VALUE_TYPE r;
8964 real_floor (&r, TYPE_MODE (type), &x);
8965 return build_real (type, r);
8966 }
8967 }
8968 break;
8969
8970 CASE_FLT_FN (BUILT_IN_CEIL):
8971 if (TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0))
8972 {
8973 REAL_VALUE_TYPE x = TREE_REAL_CST (arg0);
8974 if (!REAL_VALUE_ISNAN (x) || !flag_errno_math)
8975 {
8976 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8977 REAL_VALUE_TYPE r;
8978 real_ceil (&r, TYPE_MODE (type), &x);
8979 return build_real (type, r);
8980 }
8981 }
8982 break;
8983
8984 CASE_FLT_FN (BUILT_IN_TRUNC):
8985 if (TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0))
8986 {
8987 REAL_VALUE_TYPE x = TREE_REAL_CST (arg0);
8988 REAL_VALUE_TYPE r;
8989 real_trunc (&r, TYPE_MODE (type), &x);
8990 return build_real (type, r);
8991 }
8992 break;
8993
8994 CASE_FLT_FN (BUILT_IN_ROUND):
8995 if (TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0))
8996 {
8997 REAL_VALUE_TYPE x = TREE_REAL_CST (arg0);
8998 if (!REAL_VALUE_ISNAN (x) || !flag_errno_math)
8999 {
9000 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9001 REAL_VALUE_TYPE r;
9002 real_round (&r, TYPE_MODE (type), &x);
9003 return build_real (type, r);
9004 }
9005 }
9006 break;
9007
9008 CASE_FLT_FN (BUILT_IN_ICEIL):
9009 CASE_FLT_FN (BUILT_IN_LCEIL):
9010 CASE_FLT_FN (BUILT_IN_LLCEIL):
9011 return do_real_to_int_conversion (type, arg0, real_ceil);
9012
9013 CASE_FLT_FN (BUILT_IN_LFLOOR):
9014 CASE_FLT_FN (BUILT_IN_IFLOOR):
9015 CASE_FLT_FN (BUILT_IN_LLFLOOR):
9016 return do_real_to_int_conversion (type, arg0, real_floor);
9017
9018 CASE_FLT_FN (BUILT_IN_IROUND):
9019 CASE_FLT_FN (BUILT_IN_LROUND):
9020 CASE_FLT_FN (BUILT_IN_LLROUND):
9021 return do_real_to_int_conversion (type, arg0, real_round);
9022
9023 CASE_FLT_FN (BUILT_IN_IRINT):
9024 CASE_FLT_FN (BUILT_IN_LRINT):
9025 CASE_FLT_FN (BUILT_IN_LLRINT):
9026 /* Not yet folded to a constant. */
9027 return NULL_TREE;
9028
9029 case BUILT_IN_BSWAP16:
9030 case BUILT_IN_BSWAP32:
9031 case BUILT_IN_BSWAP64:
9032 return fold_builtin_bswap (fndecl, arg0);
9033
9034 CASE_INT_FN (BUILT_IN_FFS):
9035 CASE_INT_FN (BUILT_IN_CLZ):
9036 CASE_INT_FN (BUILT_IN_CTZ):
9037 CASE_INT_FN (BUILT_IN_CLRSB):
9038 CASE_INT_FN (BUILT_IN_POPCOUNT):
9039 CASE_INT_FN (BUILT_IN_PARITY):
9040 return fold_builtin_bitop (fndecl, arg0);
9041
9042 CASE_FLT_FN (BUILT_IN_SIGNBIT):
9043 if (TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0))
9044 return (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0))
9045 ? build_one_cst (type)
9046 : build_zero_cst (type));
9047 break;
9048
9049 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
9050 return fold_builtin_significand (loc, arg0, type);
9051
9052 CASE_FLT_FN (BUILT_IN_ILOGB):
9053 CASE_FLT_FN (BUILT_IN_LOGB):
9054 return fold_builtin_logb (loc, arg0, type);
9055
9056 case BUILT_IN_ISASCII:
9057 return fold_builtin_isascii (loc, arg0);
9058
9059 case BUILT_IN_TOASCII:
9060 return fold_builtin_toascii (loc, arg0);
9061
9062 case BUILT_IN_ISDIGIT:
9063 return fold_builtin_isdigit (loc, arg0);
9064
9065 CASE_FLT_FN (BUILT_IN_FINITE):
9066 case BUILT_IN_FINITED32:
9067 case BUILT_IN_FINITED64:
9068 case BUILT_IN_FINITED128:
9069 case BUILT_IN_ISFINITE:
9070 {
9071 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
9072 if (ret)
9073 return ret;
9074 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9075 }
9076
9077 CASE_FLT_FN (BUILT_IN_ISINF):
9078 case BUILT_IN_ISINFD32:
9079 case BUILT_IN_ISINFD64:
9080 case BUILT_IN_ISINFD128:
9081 {
9082 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
9083 if (ret)
9084 return ret;
9085 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9086 }
9087
9088 case BUILT_IN_ISNORMAL:
9089 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9090
9091 case BUILT_IN_ISINF_SIGN:
9092 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
9093
9094 CASE_FLT_FN (BUILT_IN_ISNAN):
9095 case BUILT_IN_ISNAND32:
9096 case BUILT_IN_ISNAND64:
9097 case BUILT_IN_ISNAND128:
9098 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
9099
9100 case BUILT_IN_FREE:
9101 if (integer_zerop (arg0))
9102 return build_empty_stmt (loc);
9103 break;
9104
9105 default:
9106 break;
9107 }
9108
9109 return NULL_TREE;
9110
9111 }
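/* Editor's note: the MPFR/MPC paths above fire only for constant
   arguments; e.g.

     double d = __builtin_sin (0.5);

   is evaluated in extended precision by mpfr_sin and folded to a
   REAL_CST, subject to the exactness and domain checks inside
   do_mpfr_arg1 (the &dconst0/&dconstm1/&dconst1 arguments passed above
   are the permitted lower/upper domain bounds).  */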
9112
9113 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
9114 This function returns NULL_TREE if no simplification was possible. */
9115
9116 static tree
9117 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
9118 {
9119 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9120 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9121
9122 switch (fcode)
9123 {
9124 CASE_FLT_FN (BUILT_IN_JN):
9125 if (validate_arg (arg0, INTEGER_TYPE)
9126 && validate_arg (arg1, REAL_TYPE))
9127 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
9128 break;
9129
9130 CASE_FLT_FN (BUILT_IN_YN):
9131 if (validate_arg (arg0, INTEGER_TYPE)
9132 && validate_arg (arg1, REAL_TYPE))
9133 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
9134 &dconst0, false);
9135 break;
9136
9137 CASE_FLT_FN (BUILT_IN_DREM):
9138 CASE_FLT_FN (BUILT_IN_REMAINDER):
9139 if (validate_arg (arg0, REAL_TYPE)
9140 && validate_arg (arg1, REAL_TYPE))
9141 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
9142 break;
9143
9144 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
9145 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
9146 if (validate_arg (arg0, REAL_TYPE)
9147 && validate_arg (arg1, POINTER_TYPE))
9148 return do_mpfr_lgamma_r (arg0, arg1, type);
9149 break;
9150
9151 CASE_FLT_FN (BUILT_IN_ATAN2):
9152 if (validate_arg (arg0, REAL_TYPE)
9153 && validate_arg (arg1, REAL_TYPE))
9154 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
9155 break;
9156
9157 CASE_FLT_FN (BUILT_IN_FDIM):
9158 if (validate_arg (arg0, REAL_TYPE)
9159 && validate_arg (arg1, REAL_TYPE))
9160 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
9161 break;
9162
9163 CASE_FLT_FN (BUILT_IN_HYPOT):
9164 if (validate_arg (arg0, REAL_TYPE)
9165 && validate_arg (arg1, REAL_TYPE))
9166 return do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot);
9167 break;
9168
9169 CASE_FLT_FN (BUILT_IN_CPOW):
9170 if (validate_arg (arg0, COMPLEX_TYPE)
9171 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
9172 && validate_arg (arg1, COMPLEX_TYPE)
9173 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
9174 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
9175 break;
9176
9177 CASE_FLT_FN (BUILT_IN_LDEXP):
9178 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
9179 CASE_FLT_FN (BUILT_IN_SCALBN):
9180 CASE_FLT_FN (BUILT_IN_SCALBLN):
9181 return fold_builtin_load_exponent (loc, arg0, arg1,
9182 type, /*ldexp=*/false);
9183
9184 CASE_FLT_FN (BUILT_IN_FREXP):
9185 return fold_builtin_frexp (loc, arg0, arg1, type);
9186
9187 CASE_FLT_FN (BUILT_IN_MODF):
9188 return fold_builtin_modf (loc, arg0, arg1, type);
9189
9190 case BUILT_IN_STRSTR:
9191 return fold_builtin_strstr (loc, arg0, arg1, type);
9192
9193 case BUILT_IN_STRSPN:
9194 return fold_builtin_strspn (loc, arg0, arg1);
9195
9196 case BUILT_IN_STRCSPN:
9197 return fold_builtin_strcspn (loc, arg0, arg1);
9198
9199 case BUILT_IN_STRCHR:
9200 case BUILT_IN_INDEX:
9201 return fold_builtin_strchr (loc, arg0, arg1, type);
9202
9203 case BUILT_IN_STRRCHR:
9204 case BUILT_IN_RINDEX:
9205 return fold_builtin_strrchr (loc, arg0, arg1, type);
9206
9207 case BUILT_IN_STRCMP:
9208 return fold_builtin_strcmp (loc, arg0, arg1);
9209
9210 case BUILT_IN_STRPBRK:
9211 return fold_builtin_strpbrk (loc, arg0, arg1, type);
9212
9213 case BUILT_IN_EXPECT:
9214 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
9215
9216 CASE_FLT_FN (BUILT_IN_POW):
9217 return fold_const_builtin_pow (arg0, arg1, type);
9218
9219 CASE_FLT_FN (BUILT_IN_POWI):
9220 if (TREE_CODE (arg0) == REAL_CST
9221 && !TREE_OVERFLOW (arg0)
9222 && tree_fits_shwi_p (arg1))
9223 {
9224 HOST_WIDE_INT c = tree_to_shwi (arg1);
9225 REAL_VALUE_TYPE x;
9226 real_powi (&x, TYPE_MODE (type), TREE_REAL_CST_PTR (arg0), c);
9227 return build_real (type, x);
9228 }
9229 break;
9230
9231 CASE_FLT_FN (BUILT_IN_COPYSIGN):
9232 if (TREE_CODE (arg0) == REAL_CST
9233 && TREE_CODE (arg1) == REAL_CST
9234 && !TREE_OVERFLOW (arg0)
9235 && !TREE_OVERFLOW (arg1))
9236 {
9237 REAL_VALUE_TYPE c1 = TREE_REAL_CST (arg0);
9238 real_copysign (&c1, TREE_REAL_CST_PTR (arg1));
9239 return build_real (type, c1);
9240 }
9241 break;
9242
9243 CASE_FLT_FN (BUILT_IN_FMIN):
9244 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
9245
9246 CASE_FLT_FN (BUILT_IN_FMAX):
9247 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
9248
9249 case BUILT_IN_ISGREATER:
9250 return fold_builtin_unordered_cmp (loc, fndecl,
9251 arg0, arg1, UNLE_EXPR, LE_EXPR);
9252 case BUILT_IN_ISGREATEREQUAL:
9253 return fold_builtin_unordered_cmp (loc, fndecl,
9254 arg0, arg1, UNLT_EXPR, LT_EXPR);
9255 case BUILT_IN_ISLESS:
9256 return fold_builtin_unordered_cmp (loc, fndecl,
9257 arg0, arg1, UNGE_EXPR, GE_EXPR);
9258 case BUILT_IN_ISLESSEQUAL:
9259 return fold_builtin_unordered_cmp (loc, fndecl,
9260 arg0, arg1, UNGT_EXPR, GT_EXPR);
9261 case BUILT_IN_ISLESSGREATER:
9262 return fold_builtin_unordered_cmp (loc, fndecl,
9263 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
9264 case BUILT_IN_ISUNORDERED:
9265 return fold_builtin_unordered_cmp (loc, fndecl,
9266 arg0, arg1, UNORDERED_EXPR,
9267 NOP_EXPR);
9268
9269 /* We do the folding for va_start in the expander. */
9270 case BUILT_IN_VA_START:
9271 break;
9272
9273 case BUILT_IN_OBJECT_SIZE:
9274 return fold_builtin_object_size (arg0, arg1);
9275
9276 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
9277 return fold_builtin_atomic_always_lock_free (arg0, arg1);
9278
9279 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
9280 return fold_builtin_atomic_is_lock_free (arg0, arg1);
9281
9282 default:
9283 break;
9284 }
9285 return NULL_TREE;
9286 }
9287
9288 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
9289 and ARG2.
9290 This function returns NULL_TREE if no simplification was possible. */
9291
9292 static tree
9293 fold_builtin_3 (location_t loc, tree fndecl,
9294 tree arg0, tree arg1, tree arg2)
9295 {
9296 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9297 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9298 switch (fcode)
9299 {
9300
9301 CASE_FLT_FN (BUILT_IN_SINCOS):
9302 return fold_builtin_sincos (loc, arg0, arg1, arg2);
9303
9304 CASE_FLT_FN (BUILT_IN_FMA):
9305 if (tree tem = fold_fma (loc, type, arg0, arg1, arg2))
9306 return tem;
9307 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
9308
9309 CASE_FLT_FN (BUILT_IN_REMQUO):
9310 if (validate_arg (arg0, REAL_TYPE)
9311 && validate_arg (arg1, REAL_TYPE)
9312 && validate_arg (arg2, POINTER_TYPE))
9313 return do_mpfr_remquo (arg0, arg1, arg2);
9314 break;
9315
9316 case BUILT_IN_STRNCMP:
9317 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
9318
9319 case BUILT_IN_MEMCHR:
9320 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
9321
9322 case BUILT_IN_BCMP:
9323 case BUILT_IN_MEMCMP:
9324 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
9325
9326 case BUILT_IN_EXPECT:
9327 return fold_builtin_expect (loc, arg0, arg1, arg2);
9328
9329 case BUILT_IN_ADD_OVERFLOW:
9330 case BUILT_IN_SUB_OVERFLOW:
9331 case BUILT_IN_MUL_OVERFLOW:
9332 case BUILT_IN_SADD_OVERFLOW:
9333 case BUILT_IN_SADDL_OVERFLOW:
9334 case BUILT_IN_SADDLL_OVERFLOW:
9335 case BUILT_IN_SSUB_OVERFLOW:
9336 case BUILT_IN_SSUBL_OVERFLOW:
9337 case BUILT_IN_SSUBLL_OVERFLOW:
9338 case BUILT_IN_SMUL_OVERFLOW:
9339 case BUILT_IN_SMULL_OVERFLOW:
9340 case BUILT_IN_SMULLL_OVERFLOW:
9341 case BUILT_IN_UADD_OVERFLOW:
9342 case BUILT_IN_UADDL_OVERFLOW:
9343 case BUILT_IN_UADDLL_OVERFLOW:
9344 case BUILT_IN_USUB_OVERFLOW:
9345 case BUILT_IN_USUBL_OVERFLOW:
9346 case BUILT_IN_USUBLL_OVERFLOW:
9347 case BUILT_IN_UMUL_OVERFLOW:
9348 case BUILT_IN_UMULL_OVERFLOW:
9349 case BUILT_IN_UMULLL_OVERFLOW:
9350 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
9351
9352 default:
9353 break;
9354 }
9355 return NULL_TREE;
9356 }
9357
9358 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
9359 arguments. The final boolean parameter, formerly IGNORE (true if the
9360 result of the call is ignored), is now unused. This function returns
9361 NULL_TREE if no simplification was possible. */
9362
9363 tree
9364 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
9365 {
9366 tree ret = NULL_TREE;
9367
9368 switch (nargs)
9369 {
9370 case 0:
9371 ret = fold_builtin_0 (loc, fndecl);
9372 break;
9373 case 1:
9374 ret = fold_builtin_1 (loc, fndecl, args[0]);
9375 break;
9376 case 2:
9377 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
9378 break;
9379 case 3:
9380 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
9381 break;
9382 default:
9383 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
9384 break;
9385 }
9386 if (ret)
9387 {
9388 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9389 SET_EXPR_LOCATION (ret, loc);
9390 TREE_NO_WARNING (ret) = 1;
9391 return ret;
9392 }
9393 return NULL_TREE;
9394 }
9395
9396 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
9397 list ARGS along with N new arguments in NEWARGS. SKIP is the number
9398 of arguments in ARGS to be omitted. OLDNARGS is the number of
9399 elements in ARGS. */
9400
9401 static tree
9402 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
9403 int skip, tree fndecl, int n, va_list newargs)
9404 {
9405 int nargs = oldnargs - skip + n;
9406 tree *buffer;
9407
9408 if (n > 0)
9409 {
9410 int i, j;
9411
9412 buffer = XALLOCAVEC (tree, nargs);
9413 for (i = 0; i < n; i++)
9414 buffer[i] = va_arg (newargs, tree);
9415 for (j = skip; j < oldnargs; j++, i++)
9416 buffer[i] = args[j];
9417 }
9418 else
9419 buffer = args + skip;
9420
9421 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
9422 }
9423
9424 /* Return true if FNDECL shouldn't be folded right now.
9425 If a built-in function has an inline attribute always_inline
9426 wrapper, defer folding it after always_inline functions have
9427 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
9428 might not be performed. */
9429
9430 bool
9431 avoid_folding_inline_builtin (tree fndecl)
9432 {
9433 return (DECL_DECLARED_INLINE_P (fndecl)
9434 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
9435 && cfun
9436 && !cfun->always_inline_functions_inlined
9437 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
9438 }
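/* Editor's illustration (hedged and simplified from what glibc's
   fortify headers actually emit): the always_inline wrappers this
   check defers look roughly like

     extern __inline __attribute__ ((__always_inline__)) char *
     strcpy (char *dest, const char *src)
     {
       return __builtin___strcpy_chk (dest, src,
                                      __builtin_object_size (dest, 1));
     }

   Such a wrapper is itself recognized as the strcpy builtin; folding
   it before it has been inlined would bypass the object-size check. */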
9439
9440 /* A wrapper function for builtin folding that prevents warnings for
9441 "statement without effect" and the like, caused by removing the
9442 call node earlier than the warning is generated. */
9443
9444 tree
9445 fold_call_expr (location_t loc, tree exp, bool ignore)
9446 {
9447 tree ret = NULL_TREE;
9448 tree fndecl = get_callee_fndecl (exp);
9449 if (fndecl
9450 && TREE_CODE (fndecl) == FUNCTION_DECL
9451 && DECL_BUILT_IN (fndecl)
9452 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
9453 yet. Defer folding until we see all the arguments
9454 (after inlining). */
9455 && !CALL_EXPR_VA_ARG_PACK (exp))
9456 {
9457 int nargs = call_expr_nargs (exp);
9458
9459 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
9460 instead the last argument is __builtin_va_arg_pack (). Defer folding
9461 even in that case, until arguments are finalized. */
9462 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
9463 {
9464 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
9465 if (fndecl2
9466 && TREE_CODE (fndecl2) == FUNCTION_DECL
9467 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
9468 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
9469 return NULL_TREE;
9470 }
9471
9472 if (avoid_folding_inline_builtin (fndecl))
9473 return NULL_TREE;
9474
9475 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9476 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
9477 CALL_EXPR_ARGP (exp), ignore);
9478 else
9479 {
9480 tree *args = CALL_EXPR_ARGP (exp);
9481 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
9482 if (ret)
9483 return ret;
9484 }
9485 }
9486 return NULL_TREE;
9487 }
9488
9489 /* Fold a CALL_EXPR with FN as the function expression (the unnamed tree
9490 parameter is the call's type, unused here). N arguments are passed in
9491 the array ARGARRAY. Return a folded expression or NULL_TREE if no simplification was possible. */
9492
9493 tree
9494 fold_builtin_call_array (location_t loc, tree,
9495 tree fn,
9496 int n,
9497 tree *argarray)
9498 {
9499 if (TREE_CODE (fn) != ADDR_EXPR)
9500 return NULL_TREE;
9501
9502 tree fndecl = TREE_OPERAND (fn, 0);
9503 if (TREE_CODE (fndecl) == FUNCTION_DECL
9504 && DECL_BUILT_IN (fndecl))
9505 {
9506 /* If last argument is __builtin_va_arg_pack (), arguments to this
9507 function are not finalized yet. Defer folding until they are. */
9508 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
9509 {
9510 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
9511 if (fndecl2
9512 && TREE_CODE (fndecl2) == FUNCTION_DECL
9513 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
9514 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
9515 return NULL_TREE;
9516 }
9517 if (avoid_folding_inline_builtin (fndecl))
9518 return NULL_TREE;
9519 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9520 return targetm.fold_builtin (fndecl, n, argarray, false);
9521 else
9522 return fold_builtin_n (loc, fndecl, argarray, n, false);
9523 }
9524
9525 return NULL_TREE;
9526 }
9527
9528 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
9529 along with N new arguments specified as the "..." parameters. SKIP
9530 is the number of arguments in EXP to be omitted. This function is used
9531 to do varargs-to-varargs transformations. */
9532
9533 static tree
9534 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
9535 {
9536 va_list ap;
9537 tree t;
9538
9539 va_start (ap, n);
9540 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
9541 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
9542 va_end (ap);
9543
9544 return t;
9545 }
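/* Editor's usage sketch (new_fndecl and newarg are hypothetical):
   dropping the first two arguments of EXP and prepending one new
   argument would be written

     rewrite_call_expr (loc, exp, 2, new_fndecl, 1, newarg);

   i.e. the N new arguments come first in the rebuilt call, followed by
   the old arguments starting at index SKIP.  */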
9546
9547 /* Validate a single argument ARG against a tree code CODE representing
9548 a type. */
9549
9550 static bool
9551 validate_arg (const_tree arg, enum tree_code code)
9552 {
9553 if (!arg)
9554 return false;
9555 else if (code == POINTER_TYPE)
9556 return POINTER_TYPE_P (TREE_TYPE (arg));
9557 else if (code == INTEGER_TYPE)
9558 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
9559 return code == TREE_CODE (TREE_TYPE (arg));
9560 }
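/* Editor's note: the POINTER_TYPE and INTEGER_TYPE cases are
   deliberately loose; e.g.

     validate_arg (arg, INTEGER_TYPE)

   accepts any integral type (enums, booleans, ...) via
   INTEGRAL_TYPE_P, not just literal INTEGER_TYPE nodes.  */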
9561
9562 /* This function validates the types of a function call argument list
9563 against a specified list of tree_codes. If the last specifier is a 0,
9564 that represents an ellipsis; otherwise the last specifier must be a
9565 VOID_TYPE.
9566
9567 This is the GIMPLE version of validate_arglist. Eventually we want to
9568 completely convert builtins.c to work from GIMPLEs and the tree based
9569 validate_arglist will then be removed. */
9570
9571 bool
9572 validate_gimple_arglist (const gcall *call, ...)
9573 {
9574 enum tree_code code;
9575 bool res = false;
9576 va_list ap;
9577 const_tree arg;
9578 size_t i;
9579
9580 va_start (ap, call);
9581 i = 0;
9582
9583 do
9584 {
9585 code = (enum tree_code) va_arg (ap, int);
9586 switch (code)
9587 {
9588 case 0:
9589 /* This signifies an ellipsis; any further arguments are all ok. */
9590 res = true;
9591 goto end;
9592 case VOID_TYPE:
9593 /* This signifies an endlink, if no arguments remain, return
9594 true, otherwise return false. */
9595 res = (i == gimple_call_num_args (call));
9596 goto end;
9597 default:
9598 /* If no parameters remain or the parameter's code does not
9599 match the specified code, return false. Otherwise continue
9600 checking any remaining arguments. */
9601 arg = gimple_call_arg (call, i++);
9602 if (!validate_arg (arg, code))
9603 goto end;
9604 break;
9605 }
9606 }
9607 while (1);
9608
9609 /* We need gotos here since we can only call va_end once in a
9610 function. */
9611 end: ;
9612 va_end (ap);
9613
9614 return res;
9615 }
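/* Editor's usage sketch: a checker for a two-argument real builtin
   would be written

     if (!validate_gimple_arglist (call, REAL_TYPE, REAL_TYPE, VOID_TYPE))
       return false;

   with the trailing VOID_TYPE terminating the fixed argument list, or
   a 0 in its place to allow trailing variadic arguments.  */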
9616
9617 /* Default target-specific builtin expander that does nothing. */
9618
9619 rtx
9620 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
9621 rtx target ATTRIBUTE_UNUSED,
9622 rtx subtarget ATTRIBUTE_UNUSED,
9623 machine_mode mode ATTRIBUTE_UNUSED,
9624 int ignore ATTRIBUTE_UNUSED)
9625 {
9626 return NULL_RTX;
9627 }
9628
9629 /* Returns true if EXP represents data that would potentially reside
9630 in a readonly section. */
9631
9632 bool
9633 readonly_data_expr (tree exp)
9634 {
9635 STRIP_NOPS (exp);
9636
9637 if (TREE_CODE (exp) != ADDR_EXPR)
9638 return false;
9639
9640 exp = get_base_address (TREE_OPERAND (exp, 0));
9641 if (!exp)
9642 return false;
9643
9644 /* Make sure we call decl_readonly_section only for trees it
9645 can handle (since it returns true for everything it doesn't
9646 understand). */
9647 if (TREE_CODE (exp) == STRING_CST
9648 || TREE_CODE (exp) == CONSTRUCTOR
9649 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
9650 return decl_readonly_section (exp, 0);
9651 else
9652 return false;
9653 }
9654
9655 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
9656 to the call, and TYPE is its return type.
9657
9658 Return NULL_TREE if no simplification was possible, otherwise return the
9659 simplified form of the call as a tree.
9660
9661 The simplified form may be a constant or other expression which
9662 computes the same value, but in a more efficient manner (including
9663 calls to other builtin functions).
9664
9665 The call may contain arguments which need to be evaluated, but
9666 which are not useful to determine the result of the call. In
9667 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9668 COMPOUND_EXPR will be an argument which must be evaluated.
9669 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9670 COMPOUND_EXPR in the chain will contain the tree for the simplified
9671 form of the builtin function call. */
9672
9673 static tree
9674 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
9675 {
9676 if (!validate_arg (s1, POINTER_TYPE)
9677 || !validate_arg (s2, POINTER_TYPE))
9678 return NULL_TREE;
9679 else
9680 {
9681 tree fn;
9682 const char *p1, *p2;
9683
9684 p2 = c_getstr (s2);
9685 if (p2 == NULL)
9686 return NULL_TREE;
9687
9688 p1 = c_getstr (s1);
9689 if (p1 != NULL)
9690 {
9691 const char *r = strstr (p1, p2);
9692 tree tem;
9693
9694 if (r == NULL)
9695 return build_int_cst (TREE_TYPE (s1), 0);
9696
9697 /* Return an offset into the constant string argument. */
9698 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
9699 return fold_convert_loc (loc, type, tem);
9700 }
9701
9702 /* The argument is const char *, and the result is char *, so we need
9703 a type conversion here to avoid a warning. */
9704 if (p2[0] == '\0')
9705 return fold_convert_loc (loc, type, s1);
9706
9707 if (p2[1] != '\0')
9708 return NULL_TREE;
9709
9710 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
9711 if (!fn)
9712 return NULL_TREE;
9713
9714 /* New argument list transforming strstr(s1, s2) to
9715 strchr(s1, s2[0]). */
9716 return build_call_expr_loc (loc, fn, 2, s1,
9717 build_int_cst (integer_type_node, p2[0]));
9718 }
9719 }
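/* Editor's illustration of the cheap cases above:

     strstr (s, "")    =>  (char *) s
     strstr (s, "a")   =>  strchr (s, 'a')

   A constant needle of two or more characters is left for the real
   library call, unless both strings are constant, in which case the
   offset (or null) is computed at compile time.  */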
9720
9721 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
9722 the call, and TYPE is its return type.
9723
9724 Return NULL_TREE if no simplification was possible, otherwise return the
9725 simplified form of the call as a tree.
9726
9727 The simplified form may be a constant or other expression which
9728 computes the same value, but in a more efficient manner (including
9729 calls to other builtin functions).
9730
9731 The call may contain arguments which need to be evaluated, but
9732 which are not useful to determine the result of the call. In
9733 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9734 COMPOUND_EXPR will be an argument which must be evaluated.
9735 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9736 COMPOUND_EXPR in the chain will contain the tree for the simplified
9737 form of the builtin function call. */
9738
9739 static tree
9740 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
9741 {
9742 if (!validate_arg (s1, POINTER_TYPE)
9743 || !validate_arg (s2, INTEGER_TYPE))
9744 return NULL_TREE;
9745 else
9746 {
9747 const char *p1;
9748
9749 if (TREE_CODE (s2) != INTEGER_CST)
9750 return NULL_TREE;
9751
9752 p1 = c_getstr (s1);
9753 if (p1 != NULL)
9754 {
9755 char c;
9756 const char *r;
9757 tree tem;
9758
9759 if (target_char_cast (s2, &c))
9760 return NULL_TREE;
9761
9762 r = strchr (p1, c);
9763
9764 if (r == NULL)
9765 return build_int_cst (TREE_TYPE (s1), 0);
9766
9767 /* Return an offset into the constant string argument. */
9768 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
9769 return fold_convert_loc (loc, type, tem);
9770 }
9771 return NULL_TREE;
9772 }
9773 }
9774
9775 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
9776 the call, and TYPE is its return type.
9777
9778 Return NULL_TREE if no simplification was possible, otherwise return the
9779 simplified form of the call as a tree.
9780
9781 The simplified form may be a constant or other expression which
9782 computes the same value, but in a more efficient manner (including
9783 calls to other builtin functions).
9784
9785 The call may contain arguments which need to be evaluated, but
9786 which are not useful to determine the result of the call. In
9787 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9788 COMPOUND_EXPR will be an argument which must be evaluated.
9789 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9790 COMPOUND_EXPR in the chain will contain the tree for the simplified
9791 form of the builtin function call. */
9792
9793 static tree
9794 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
9795 {
9796 if (!validate_arg (s1, POINTER_TYPE)
9797 || !validate_arg (s2, INTEGER_TYPE))
9798 return NULL_TREE;
9799 else
9800 {
9801 tree fn;
9802 const char *p1;
9803
9804 if (TREE_CODE (s2) != INTEGER_CST)
9805 return NULL_TREE;
9806
9807 p1 = c_getstr (s1);
9808 if (p1 != NULL)
9809 {
9810 char c;
9811 const char *r;
9812 tree tem;
9813
9814 if (target_char_cast (s2, &c))
9815 return NULL_TREE;
9816
9817 r = strrchr (p1, c);
9818
9819 if (r == NULL)
9820 return build_int_cst (TREE_TYPE (s1), 0);
9821
9822 /* Return an offset into the constant string argument. */
9823 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
9824 return fold_convert_loc (loc, type, tem);
9825 }
9826
9827 if (! integer_zerop (s2))
9828 return NULL_TREE;
9829
9830 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
9831 if (!fn)
9832 return NULL_TREE;
9833
9834 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
9835 return build_call_expr_loc (loc, fn, 2, s1, s2);
9836 }
9837 }
9838
9839 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
9840 to the call, and TYPE is its return type.
9841
9842 Return NULL_TREE if no simplification was possible, otherwise return the
9843 simplified form of the call as a tree.
9844
9845 The simplified form may be a constant or other expression which
9846 computes the same value, but in a more efficient manner (including
9847 calls to other builtin functions).
9848
9849 The call may contain arguments which need to be evaluated, but
9850 which are not useful to determine the result of the call. In
9851 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9852 COMPOUND_EXPR will be an argument which must be evaluated.
9853 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9854 COMPOUND_EXPR in the chain will contain the tree for the simplified
9855 form of the builtin function call. */
9856
9857 static tree
9858 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
9859 {
9860 if (!validate_arg (s1, POINTER_TYPE)
9861 || !validate_arg (s2, POINTER_TYPE))
9862 return NULL_TREE;
9863 else
9864 {
9865 tree fn;
9866 const char *p1, *p2;
9867
9868 p2 = c_getstr (s2);
9869 if (p2 == NULL)
9870 return NULL_TREE;
9871
9872 p1 = c_getstr (s1);
9873 if (p1 != NULL)
9874 {
9875 const char *r = strpbrk (p1, p2);
9876 tree tem;
9877
9878 if (r == NULL)
9879 return build_int_cst (TREE_TYPE (s1), 0);
9880
9881 /* Return an offset into the constant string argument. */
9882 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
9883 return fold_convert_loc (loc, type, tem);
9884 }
9885
9886 if (p2[0] == '\0')
9887 /* strpbrk(x, "") == NULL.
9888 Evaluate and ignore s1 in case it had side-effects. */
9889 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
9890
9891 if (p2[1] != '\0')
9892 return NULL_TREE; /* Really call strpbrk. */
9893
9894 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
9895 if (!fn)
9896 return NULL_TREE;
9897
9898 /* New argument list transforming strpbrk(s1, s2) to
9899 strchr(s1, s2[0]). */
9900 return build_call_expr_loc (loc, fn, 2, s1,
9901 build_int_cst (integer_type_node, p2[0]));
9902 }
9903 }
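/* Editor's illustration, mirroring the strstr folding above:

     strpbrk (s, "")    =>  (char *) 0   (s still evaluated for
                                          side-effects)
     strpbrk (s, "a")   =>  strchr (s, 'a')
*/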
9904
9905 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
9906 to the call.
9907
9908 Return NULL_TREE if no simplification was possible, otherwise return the
9909 simplified form of the call as a tree.
9910
9911 The simplified form may be a constant or other expression which
9912 computes the same value, but in a more efficient manner (including
9913 calls to other builtin functions).
9914
9915 The call may contain arguments which need to be evaluated, but
9916 which are not useful to determine the result of the call. In
9917 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9918 COMPOUND_EXPR will be an argument which must be evaluated.
9919 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9920 COMPOUND_EXPR in the chain will contain the tree for the simplified
9921 form of the builtin function call. */
9922
9923 static tree
9924 fold_builtin_strspn (location_t loc, tree s1, tree s2)
9925 {
9926 if (!validate_arg (s1, POINTER_TYPE)
9927 || !validate_arg (s2, POINTER_TYPE))
9928 return NULL_TREE;
9929 else
9930 {
9931 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
9932
9933 /* If both arguments are constants, evaluate at compile-time. */
9934 if (p1 && p2)
9935 {
9936 const size_t r = strspn (p1, p2);
9937 return build_int_cst (size_type_node, r);
9938 }
9939
9940 /* If either argument is "", return NULL_TREE. */
9941 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
9942 /* Evaluate and ignore both arguments in case either one has
9943 side-effects. */
9944 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
9945 s1, s2);
9946 return NULL_TREE;
9947 }
9948 }
9949
9950 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
9951 to the call.
9952
9953 Return NULL_TREE if no simplification was possible, otherwise return the
9954 simplified form of the call as a tree.
9955
9956 The simplified form may be a constant or other expression which
9957 computes the same value, but in a more efficient manner (including
9958 calls to other builtin functions).
9959
9960 The call may contain arguments which need to be evaluated, but
9961 which are not useful to determine the result of the call. In
9962 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9963 COMPOUND_EXPR will be an argument which must be evaluated.
9964 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9965 COMPOUND_EXPR in the chain will contain the tree for the simplified
9966 form of the builtin function call. */
9967
9968 static tree
9969 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
9970 {
9971 if (!validate_arg (s1, POINTER_TYPE)
9972 || !validate_arg (s2, POINTER_TYPE))
9973 return NULL_TREE;
9974 else
9975 {
9976 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
9977
9978 /* If both arguments are constants, evaluate at compile-time. */
9979 if (p1 && p2)
9980 {
9981 const size_t r = strcspn (p1, p2);
9982 return build_int_cst (size_type_node, r);
9983 }
9984
9985 /* If the first argument is "", return NULL_TREE. */
9986 if (p1 && *p1 == '\0')
9987 {
9988 /* Evaluate and ignore argument s2 in case it has
9989 side-effects. */
9990 return omit_one_operand_loc (loc, size_type_node,
9991 size_zero_node, s2);
9992 }
9993
9994 /* If the second argument is "", return __builtin_strlen(s1). */
9995 if (p2 && *p2 == '\0')
9996 {
9997 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
9998
9999 /* If the replacement _DECL isn't initialized, don't do the
10000 transformation. */
10001 if (!fn)
10002 return NULL_TREE;
10003
10004 return build_call_expr_loc (loc, fn, 1, s1);
10005 }
10006 return NULL_TREE;
10007 }
10008 }
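/* Editor's illustration of the strspn/strcspn folds above:

     strspn ("hello", "hel")  =>  4            (both constant)
     strcspn (s, "")          =>  strlen (s)
     strcspn ("", s)          =>  0            (s still evaluated)
*/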
10009
10010 /* Fold the next_arg or va_start call EXP. Returns true if an error
10011 was produced, false otherwise. This is done so that we don't output
10012 the error or warning two or three times. */
10013
10014 bool
10015 fold_builtin_next_arg (tree exp, bool va_start_p)
10016 {
10017 tree fntype = TREE_TYPE (current_function_decl);
10018 int nargs = call_expr_nargs (exp);
10019 tree arg;
10020 /* There is a good chance the current input_location points inside the
10021 definition of the va_start macro (perhaps on the token for the
10022 builtin) in a system header, so warnings will not be emitted.
10023 Use the location in real source code. */
10024 source_location current_location =
10025 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
10026 NULL);
10027
10028 if (!stdarg_p (fntype))
10029 {
10030 error ("%<va_start%> used in function with fixed args");
10031 return true;
10032 }
10033
10034 if (va_start_p)
10035 {
10036 if (nargs != 2)
10037 {
10038 error ("wrong number of arguments to function %<va_start%>");
10039 return true;
10040 }
10041 arg = CALL_EXPR_ARG (exp, 1);
10042 }
10043 /* We use __builtin_va_start (ap, 0) or __builtin_next_arg (0)
10044 when we checked the arguments and if needed issued a warning. */
10045 else
10046 {
10047 if (nargs == 0)
10048 {
10049 /* Evidently an out of date version of <stdarg.h>; can't validate
10050 va_start's second argument, but can still work as intended. */
10051 warning_at (current_location,
10052 OPT_Wvarargs,
10053 "%<__builtin_next_arg%> called without an argument");
10054 return true;
10055 }
10056 else if (nargs > 1)
10057 {
10058 error ("wrong number of arguments to function %<__builtin_next_arg%>");
10059 return true;
10060 }
10061 arg = CALL_EXPR_ARG (exp, 0);
10062 }
10063
10064 if (TREE_CODE (arg) == SSA_NAME)
10065 arg = SSA_NAME_VAR (arg);
10066
10067 /* We destructively modify the call to be __builtin_va_start (ap, 0)
10068 or __builtin_next_arg (0) the first time we see it, after checking
10069 the arguments and if needed issuing a warning. */
10070 if (!integer_zerop (arg))
10071 {
10072 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
10073
10074 /* Strip off all nops for the sake of the comparison. This
10075 is not quite the same as STRIP_NOPS. It does more.
10076 We must also strip off INDIRECT_EXPR for C++ reference
10077 parameters. */
10078 while (CONVERT_EXPR_P (arg)
10079 || TREE_CODE (arg) == INDIRECT_REF)
10080 arg = TREE_OPERAND (arg, 0);
10081 if (arg != last_parm)
10082 {
10083 /* FIXME: Sometimes the tree optimizers can hand us something
10084 other than the last argument even though the user did use the
10085 last argument. We just warn and leave the argument as it is,
10086 so wrong code may be generated because of
10087 it. */
10088 warning_at (current_location,
10089 OPT_Wvarargs,
10090 "second parameter of %<va_start%> not last named argument");
10091 }
10092
10093 /* Undefined by C99 7.15.1.4p4 (va_start):
10094 "If the parameter parmN is declared with the register storage
10095 class, with a function or array type, or with a type that is
10096 not compatible with the type that results after application of
10097 the default argument promotions, the behavior is undefined."
10098 */
10099 else if (DECL_REGISTER (arg))
10100 {
10101 warning_at (current_location,
10102 OPT_Wvarargs,
10103 "undefined behaviour when second parameter of "
10104 "%<va_start%> is declared with %<register%> storage");
10105 }
10106
10107 /* We want to verify the second parameter just once before the tree
10108 optimizers are run and then avoid keeping it in the tree,
10109 as otherwise we could warn even for correct code like:
10110 void foo (int i, ...)
10111 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
10112 if (va_start_p)
10113 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
10114 else
10115 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
10116 }
10117 return false;
10118 }
10119
10120
10121 /* Expand a call EXP to __builtin_object_size. */
10122
10123 static rtx
10124 expand_builtin_object_size (tree exp)
10125 {
10126 tree ost;
10127 int object_size_type;
10128 tree fndecl = get_callee_fndecl (exp);
10129
10130 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
10131 {
10132 error ("%Kfirst argument of %D must be a pointer, second integer constant",
10133 exp, fndecl);
10134 expand_builtin_trap ();
10135 return const0_rtx;
10136 }
10137
10138 ost = CALL_EXPR_ARG (exp, 1);
10139 STRIP_NOPS (ost);
10140
10141 if (TREE_CODE (ost) != INTEGER_CST
10142 || tree_int_cst_sgn (ost) < 0
10143 || compare_tree_int (ost, 3) > 0)
10144 {
10145 error ("%Klast argument of %D is not integer constant between 0 and 3",
10146 exp, fndecl);
10147 expand_builtin_trap ();
10148 return const0_rtx;
10149 }
10150
10151 object_size_type = tree_to_shwi (ost);
10152
10153 return object_size_type < 2 ? constm1_rtx : const0_rtx;
10154 }
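/* Editor's note: by expansion time any foldable __builtin_object_size
   has already been folded away, so the fallback above just
   materializes the "unknown" answers, e.g.

     __builtin_object_size (p, 0)  =>  (size_t) -1   // maximum estimate
     __builtin_object_size (p, 2)  =>  (size_t) 0    // minimum estimate
*/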
10155
10156 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
10157 FCODE is the BUILT_IN_* to use.
10158 Return NULL_RTX if we failed; the caller should emit a normal call,
10159 otherwise try to get the result in TARGET, if convenient (and in
10160 mode MODE if that's convenient). */
10161
10162 static rtx
10163 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
10164 enum built_in_function fcode)
10165 {
10166 tree dest, src, len, size;
10167
10168 if (!validate_arglist (exp,
10169 POINTER_TYPE,
10170 fcode == BUILT_IN_MEMSET_CHK
10171 ? INTEGER_TYPE : POINTER_TYPE,
10172 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
10173 return NULL_RTX;
10174
10175 dest = CALL_EXPR_ARG (exp, 0);
10176 src = CALL_EXPR_ARG (exp, 1);
10177 len = CALL_EXPR_ARG (exp, 2);
10178 size = CALL_EXPR_ARG (exp, 3);
10179
10180 if (! tree_fits_uhwi_p (size))
10181 return NULL_RTX;
10182
10183 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
10184 {
10185 tree fn;
10186
10187 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
10188 {
10189 warning_at (tree_nonartificial_location (exp),
10190 0, "%Kcall to %D will always overflow destination buffer",
10191 exp, get_callee_fndecl (exp));
10192 return NULL_RTX;
10193 }
10194
10195 fn = NULL_TREE;
10196 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
10197 mem{cpy,pcpy,move,set} is available. */
10198 switch (fcode)
10199 {
10200 case BUILT_IN_MEMCPY_CHK:
10201 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
10202 break;
10203 case BUILT_IN_MEMPCPY_CHK:
10204 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
10205 break;
10206 case BUILT_IN_MEMMOVE_CHK:
10207 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
10208 break;
10209 case BUILT_IN_MEMSET_CHK:
10210 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
10211 break;
10212 default:
10213 break;
10214 }
10215
10216 if (! fn)
10217 return NULL_RTX;
10218
10219 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
10220 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10221 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10222 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10223 }
10224 else if (fcode == BUILT_IN_MEMSET_CHK)
10225 return NULL_RTX;
10226 else
10227 {
10228 unsigned int dest_align = get_pointer_alignment (dest);
10229
10230 /* If DEST is not a pointer type, call the normal function. */
10231 if (dest_align == 0)
10232 return NULL_RTX;
10233
10234 /* If SRC and DEST are the same (and not volatile), do nothing. */
10235 if (operand_equal_p (src, dest, 0))
10236 {
10237 tree expr;
10238
10239 if (fcode != BUILT_IN_MEMPCPY_CHK)
10240 {
10241 /* Evaluate and ignore LEN in case it has side-effects. */
10242 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
10243 return expand_expr (dest, target, mode, EXPAND_NORMAL);
10244 }
10245
10246 expr = fold_build_pointer_plus (dest, len);
10247 return expand_expr (expr, target, mode, EXPAND_NORMAL);
10248 }
10249
10250 /* __memmove_chk special case. */
10251 if (fcode == BUILT_IN_MEMMOVE_CHK)
10252 {
10253 unsigned int src_align = get_pointer_alignment (src);
10254
10255 if (src_align == 0)
10256 return NULL_RTX;
10257
10258 /* If src is categorized for a readonly section we can use
10259 normal __memcpy_chk. */
10260 if (readonly_data_expr (src))
10261 {
10262 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
10263 if (!fn)
10264 return NULL_RTX;
10265 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
10266 dest, src, len, size);
10267 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10268 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10269 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10270 }
10271 }
10272 return NULL_RTX;
10273 }
10274 }
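/* Editor's sketch of the typical _FORTIFY_SOURCE pattern handled
   above:

     char buf[8];
     __builtin___memcpy_chk (buf, src, n,
                             __builtin_object_size (buf, 0));

   With constant n <= 8 this lowers to a plain memcpy; with constant
   n > 8 it warns that the call will always overflow; otherwise the
   run-time checking libc entry point is called.  */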
10275
10276 /* Emit warning if a buffer overflow is detected at compile time. */
10277
10278 static void
10279 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
10280 {
10281 int is_strlen = 0;
10282 tree len, size;
10283 location_t loc = tree_nonartificial_location (exp);
10284
10285 switch (fcode)
10286 {
10287 case BUILT_IN_STRCPY_CHK:
10288 case BUILT_IN_STPCPY_CHK:
10289 /* For __strcat_chk the warning will be emitted only if overflowing
10290 by at least strlen (dest) + 1 bytes. */
10291 case BUILT_IN_STRCAT_CHK:
10292 len = CALL_EXPR_ARG (exp, 1);
10293 size = CALL_EXPR_ARG (exp, 2);
10294 is_strlen = 1;
10295 break;
10296 case BUILT_IN_STRNCAT_CHK:
10297 case BUILT_IN_STRNCPY_CHK:
10298 case BUILT_IN_STPNCPY_CHK:
10299 len = CALL_EXPR_ARG (exp, 2);
10300 size = CALL_EXPR_ARG (exp, 3);
10301 break;
10302 case BUILT_IN_SNPRINTF_CHK:
10303 case BUILT_IN_VSNPRINTF_CHK:
10304 len = CALL_EXPR_ARG (exp, 1);
10305 size = CALL_EXPR_ARG (exp, 3);
10306 break;
10307 default:
10308 gcc_unreachable ();
10309 }
10310
10311 if (!len || !size)
10312 return;
10313
10314 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
10315 return;
10316
10317 if (is_strlen)
10318 {
10319 len = c_strlen (len, 1);
10320 if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
10321 return;
10322 }
10323 else if (fcode == BUILT_IN_STRNCAT_CHK)
10324 {
10325 tree src = CALL_EXPR_ARG (exp, 1);
10326 if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
10327 return;
10328 src = c_strlen (src, 1);
10329 if (! src || ! tree_fits_uhwi_p (src))
10330 {
10331 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
10332 exp, get_callee_fndecl (exp));
10333 return;
10334 }
10335 else if (tree_int_cst_lt (src, size))
10336 return;
10337 }
10338 else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
10339 return;
10340
10341 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
10342 exp, get_callee_fndecl (exp));
10343 }
10344
10345 /* Emit warning if a buffer overflow is detected at compile time
10346 in __sprintf_chk/__vsprintf_chk calls. */
10347
10348 static void
10349 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
10350 {
10351 tree size, len, fmt;
10352 const char *fmt_str;
10353 int nargs = call_expr_nargs (exp);
10354
10355 /* Verify the required arguments in the original call. */
10356
10357 if (nargs < 4)
10358 return;
10359 size = CALL_EXPR_ARG (exp, 2);
10360 fmt = CALL_EXPR_ARG (exp, 3);
10361
10362 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
10363 return;
10364
10365 /* Check whether the format is a literal string constant. */
10366 fmt_str = c_getstr (fmt);
10367 if (fmt_str == NULL)
10368 return;
10369
10370 if (!init_target_chars ())
10371 return;
10372
10373 /* If the format doesn't contain % args or %%, we know its size. */
10374 if (strchr (fmt_str, target_percent) == 0)
10375 len = build_int_cstu (size_type_node, strlen (fmt_str));
10376 /* If the format is "%s" and the first ... argument is a string
10377 literal, we know its size too. */
10378 else if (fcode == BUILT_IN_SPRINTF_CHK
10379 && strcmp (fmt_str, target_percent_s) == 0)
10380 {
10381 tree arg;
10382
10383 if (nargs < 5)
10384 return;
10385 arg = CALL_EXPR_ARG (exp, 4);
10386 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
10387 return;
10388
10389 len = c_strlen (arg, 1);
10390 if (!len || ! tree_fits_uhwi_p (len))
10391 return;
10392 }
10393 else
10394 return;
10395
10396 if (! tree_int_cst_lt (len, size))
10397 warning_at (tree_nonartificial_location (exp),
10398 0, "%Kcall to %D will always overflow destination buffer",
10399 exp, get_callee_fndecl (exp));
10400 }
10401
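/* A hypothetical example:

     char buf[4];
     __builtin___sprintf_chk (buf, 0, __builtin_object_size (buf, 0),
                              "%s", "hello");

   The format is exactly "%s" and the argument is a literal, so LEN is 5;
   since 5 >= 4, the always-overflows warning is emitted.  */
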
10402 /* Emit a warning if free is called with the address of a non-heap object. */
10403
10404 static void
10405 maybe_emit_free_warning (tree exp)
10406 {
10407 tree arg = CALL_EXPR_ARG (exp, 0);
10408
10409 STRIP_NOPS (arg);
10410 if (TREE_CODE (arg) != ADDR_EXPR)
10411 return;
10412
10413 arg = get_base_address (TREE_OPERAND (arg, 0));
10414 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
10415 return;
10416
10417 if (SSA_VAR_P (arg))
10418 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10419 "%Kattempt to free a non-heap object %qD", exp, arg);
10420 else
10421 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10422 "%Kattempt to free a non-heap object", exp);
10423 }
10424
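/* A hypothetical example: with free declared via <stdlib.h>,

     int i;
     free (&i);

   reaches the SSA_VAR_P case above and produces
   "attempt to free a non-heap object 'i'" under -Wfree-nonheap-object.  */
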
10425 /* Fold a call to __builtin_object_size with arguments PTR and OST,
10426 if possible. */
10427
10428 static tree
10429 fold_builtin_object_size (tree ptr, tree ost)
10430 {
10431 unsigned HOST_WIDE_INT bytes;
10432 int object_size_type;
10433
10434 if (!validate_arg (ptr, POINTER_TYPE)
10435 || !validate_arg (ost, INTEGER_TYPE))
10436 return NULL_TREE;
10437
10438 STRIP_NOPS (ost);
10439
10440 if (TREE_CODE (ost) != INTEGER_CST
10441 || tree_int_cst_sgn (ost) < 0
10442 || compare_tree_int (ost, 3) > 0)
10443 return NULL_TREE;
10444
10445 object_size_type = tree_to_shwi (ost);
10446
10447 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
10448 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
10449 and (size_t) 0 for types 2 and 3. */
10450 if (TREE_SIDE_EFFECTS (ptr))
10451 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
10452
10453 if (TREE_CODE (ptr) == ADDR_EXPR)
10454 {
10455 bytes = compute_builtin_object_size (ptr, object_size_type);
10456 if (wi::fits_to_tree_p (bytes, size_type_node))
10457 return build_int_cstu (size_type_node, bytes);
10458 }
10459 else if (TREE_CODE (ptr) == SSA_NAME)
10460 {
10461 /* If the object size is not known yet, delay folding until
10462 later. Maybe subsequent passes will help determine
10463 it. */
10464 bytes = compute_builtin_object_size (ptr, object_size_type);
10465 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
10466 && wi::fits_to_tree_p (bytes, size_type_node))
10467 return build_int_cstu (size_type_node, bytes);
10468 }
10469
10470 return NULL_TREE;
10471 }
10472
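/* A hypothetical example:

     char buf[16];
     size_t n = __builtin_object_size (&buf[4], 0);

   PTR is an ADDR_EXPR, compute_builtin_object_size yields 12 (the bytes
   remaining in BUF), and the call folds to the constant (size_t) 12.  */
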
10473 /* Builtins with folding operations that operate on "..." arguments
10474 need special handling; we need to store the arguments in a convenient
10475 data structure before attempting any folding. Fortunately there are
10476 only a few builtins that fall into this category. FNDECL is the
10477 function, ARGS is the array of its NARGS arguments. */
10478
10479 static tree
10480 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
10481 {
10482 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10483 tree ret = NULL_TREE;
10484
10485 switch (fcode)
10486 {
10487 case BUILT_IN_FPCLASSIFY:
10488 ret = fold_builtin_fpclassify (loc, args, nargs);
10489 break;
10490
10491 default:
10492 break;
10493 }
10494 if (ret)
10495 {
10496 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10497 SET_EXPR_LOCATION (ret, loc);
10498 TREE_NO_WARNING (ret) = 1;
10499 return ret;
10500 }
10501 return NULL_TREE;
10502 }
10503
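/* A hypothetical example: a call such as

     __builtin_fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL,
                           FP_SUBNORMAL, FP_ZERO, 1.0)

   has its six arguments collected into ARGS here, and because the last
   argument is a constant, fold_builtin_fpclassify reduces the whole call
   to the FP_NORMAL value.  */
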
10504 /* Initialize format string characters in the target charset. */
10505
10506 bool
10507 init_target_chars (void)
10508 {
10509 static bool init;
10510 if (!init)
10511 {
10512 target_newline = lang_hooks.to_target_charset ('\n');
10513 target_percent = lang_hooks.to_target_charset ('%');
10514 target_c = lang_hooks.to_target_charset ('c');
10515 target_s = lang_hooks.to_target_charset ('s');
10516 if (target_newline == 0 || target_percent == 0 || target_c == 0
10517 || target_s == 0)
10518 return false;
10519
10520 target_percent_c[0] = target_percent;
10521 target_percent_c[1] = target_c;
10522 target_percent_c[2] = '\0';
10523
10524 target_percent_s[0] = target_percent;
10525 target_percent_s[1] = target_s;
10526 target_percent_s[2] = '\0';
10527
10528 target_percent_s_newline[0] = target_percent;
10529 target_percent_s_newline[1] = target_s;
10530 target_percent_s_newline[2] = target_newline;
10531 target_percent_s_newline[3] = '\0';
10532
10533 init = true;
10534 }
10535 return true;
10536 }
10537
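/* Usage note: callers compare the bytes of a target-side string against
   these globals instead of against host character constants, e.g.
   maybe_emit_sprintf_chk_warning above tests

     strcmp (fmt_str, target_percent_s) == 0

   so cross compilers whose target character set differs from the host's
   still recognize "%s" correctly.  */
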
10538 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
10539 and no overflow/underflow occurred. INEXACT is true if M was not
10540 exactly calculated. TYPE is the tree type for the result. This
10541 function assumes that the MPFR flags were cleared just before M was
10542 calculated, so that any flag set on entry to this function reflects
10543 the computation of M. Return NULL_TREE if any checks fail. */
10544
10545 static tree
10546 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
10547 {
10548 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10549 overflow/underflow occurred. If -frounding-math, proceed iff the
10550 result of calling FUNC was exact. */
10551 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
10552 && (!flag_rounding_math || !inexact))
10553 {
10554 REAL_VALUE_TYPE rr;
10555
10556 real_from_mpfr (&rr, m, type, GMP_RNDN);
10557 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value;
10558 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10559 but the mpfr_t is not, then we underflowed in the
10560 conversion. */
10561 if (real_isfinite (&rr)
10562 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
10563 {
10564 REAL_VALUE_TYPE rmode;
10565
10566 real_convert (&rmode, TYPE_MODE (type), &rr);
10567 /* Proceed iff the specified mode can hold the value. */
10568 if (real_identical (&rmode, &rr))
10569 return build_real (type, rmode);
10570 }
10571 }
10572 return NULL_TREE;
10573 }
10574
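/* All callers below follow the same schematic pattern:

     mpfr_clear_flags ();
     inexact = func (m, m, rnd);
     result = do_mpfr_ckconv (m, type, inexact);

   The flags are reset immediately before the computation, so any
   overflow or underflow flag observed here is known to come from
   computing M itself.  */
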
10575 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
10576 number and no overflow/underflow occurred. INEXACT is true if M
10577 was not exactly calculated. TYPE is the tree type for the result.
10578 This function assumes that the MPFR flags were cleared just before
10579 M was calculated, so that any flag set on entry reflects the
10580 computation of M. Return NULL_TREE if any checks fail; if
10581 FORCE_CONVERT is true, bypass the checks. */
10582
10583 static tree
10584 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
10585 {
10586 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10587 overflow/underflow occurred. If -frounding-math, proceed iff the
10588 result of calling FUNC was exact. */
10589 if (force_convert
10590 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
10591 && !mpfr_overflow_p () && !mpfr_underflow_p ()
10592 && (!flag_rounding_math || !inexact)))
10593 {
10594 REAL_VALUE_TYPE re, im;
10595
10596 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
10597 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
10598 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values;
10599 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10600 but the mpfr_t is not, then we underflowed in the
10601 conversion. */
10602 if (force_convert
10603 || (real_isfinite (&re) && real_isfinite (&im)
10604 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
10605 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
10606 {
10607 REAL_VALUE_TYPE re_mode, im_mode;
10608
10609 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
10610 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
10611 /* Proceed iff the specified mode can hold the value. */
10612 if (force_convert
10613 || (real_identical (&re_mode, &re)
10614 && real_identical (&im_mode, &im)))
10615 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
10616 build_real (TREE_TYPE (type), im_mode));
10617 }
10618 }
10619 return NULL_TREE;
10620 }
10621
10622 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
10623 FUNC on it and return the resulting value as a tree with type TYPE.
10624 If MIN and/or MAX are not NULL, then the supplied ARG must be
10625 within those bounds. If INCLUSIVE is true, then MIN/MAX are
10626 acceptable values, otherwise they are not. The mpfr precision is
10627 set to the precision of TYPE. We assume that function FUNC returns
10628 zero if the result could be calculated exactly within the requested
10629 precision. */
10630
10631 static tree
10632 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
10633 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
10634 bool inclusive)
10635 {
10636 tree result = NULL_TREE;
10637
10638 STRIP_NOPS (arg);
10639
10640 /* To proceed, MPFR must exactly represent the target floating point
10641 format, which only happens when the target base equals two. */
10642 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10643 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
10644 {
10645 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
10646
10647 if (real_isfinite (ra)
10648 && (!min || real_compare (inclusive ? GE_EXPR : GT_EXPR, ra, min))
10649 && (!max || real_compare (inclusive ? LE_EXPR : LT_EXPR, ra, max)))
10650 {
10651 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10652 const int prec = fmt->p;
10653 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
10654 int inexact;
10655 mpfr_t m;
10656
10657 mpfr_init2 (m, prec);
10658 mpfr_from_real (m, ra, GMP_RNDN);
10659 mpfr_clear_flags ();
10660 inexact = func (m, m, rnd);
10661 result = do_mpfr_ckconv (m, type, inexact);
10662 mpfr_clear (m);
10663 }
10664 }
10665
10666 return result;
10667 }
10668
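/* A sketch of a typical caller: folding a constant __builtin_sin uses

     do_mpfr_arg1 (arg, type, mpfr_sin, NULL, NULL, false);

   while a domain-restricted function such as asin passes &dconstm1 and
   &dconst1 as MIN/MAX with INCLUSIVE set, so folding is refused outside
   [-1, 1].  */
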
10669 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
10670 FUNC on it and return the resulting value as a tree with type TYPE.
10671 The mpfr precision is set to the precision of TYPE. We assume that
10672 function FUNC returns zero if the result could be calculated
10673 exactly within the requested precision. */
10674
10675 static tree
10676 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
10677 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
10678 {
10679 tree result = NULL_TREE;
10680
10681 STRIP_NOPS (arg1);
10682 STRIP_NOPS (arg2);
10683
10684 /* To proceed, MPFR must exactly represent the target floating point
10685 format, which only happens when the target base equals two. */
10686 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10687 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
10688 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
10689 {
10690 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
10691 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
10692
10693 if (real_isfinite (ra1) && real_isfinite (ra2))
10694 {
10695 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10696 const int prec = fmt->p;
10697 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
10698 int inexact;
10699 mpfr_t m1, m2;
10700
10701 mpfr_inits2 (prec, m1, m2, NULL);
10702 mpfr_from_real (m1, ra1, GMP_RNDN);
10703 mpfr_from_real (m2, ra2, GMP_RNDN);
10704 mpfr_clear_flags ();
10705 inexact = func (m1, m1, m2, rnd);
10706 result = do_mpfr_ckconv (m1, type, inexact);
10707 mpfr_clears (m1, m2, NULL);
10708 }
10709 }
10710
10711 return result;
10712 }
10713
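/* A sketch of a typical caller: a constant __builtin_atan2 folds via

     do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);

   mpfr_atan2 has exactly the four-operand signature FUNC requires.  */
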
10714 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
10715 FUNC on it and return the resulting value as a tree with type TYPE.
10716 The mpfr precision is set to the precision of TYPE. We assume that
10717 function FUNC returns zero if the result could be calculated
10718 exactly within the requested precision. */
10719
10720 static tree
10721 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
10722 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
10723 {
10724 tree result = NULL_TREE;
10725
10726 STRIP_NOPS (arg1);
10727 STRIP_NOPS (arg2);
10728 STRIP_NOPS (arg3);
10729
10730 /* To proceed, MPFR must exactly represent the target floating point
10731 format, which only happens when the target base equals two. */
10732 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10733 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
10734 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
10735 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
10736 {
10737 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
10738 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
10739 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
10740
10741 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
10742 {
10743 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10744 const int prec = fmt->p;
10745 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
10746 int inexact;
10747 mpfr_t m1, m2, m3;
10748
10749 mpfr_inits2 (prec, m1, m2, m3, NULL);
10750 mpfr_from_real (m1, ra1, GMP_RNDN);
10751 mpfr_from_real (m2, ra2, GMP_RNDN);
10752 mpfr_from_real (m3, ra3, GMP_RNDN);
10753 mpfr_clear_flags ();
10754 inexact = func (m1, m1, m2, m3, rnd);
10755 result = do_mpfr_ckconv (m1, type, inexact);
10756 mpfr_clears (m1, m2, m3, NULL);
10757 }
10758 }
10759
10760 return result;
10761 }
10762
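/* A sketch of a typical caller: the fma constant folding referred to in
   this change uses

     do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);

   mpfr_fma computes op1 * op2 + op3 with a single rounding, which matches
   the semantics __builtin_fma must fold to.  */
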
10763 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
10764 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
10765 If ARG_SINP and ARG_COSP are NULL then the result is returned
10766 as a complex value.
10767 The type is taken from the type of ARG and is used for setting the
10768 precision of the calculation and results. */
10769
10770 static tree
10771 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
10772 {
10773 tree const type = TREE_TYPE (arg);
10774 tree result = NULL_TREE;
10775
10776 STRIP_NOPS (arg);
10777
10778 /* To proceed, MPFR must exactly represent the target floating point
10779 format, which only happens when the target base equals two. */
10780 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10781 && TREE_CODE (arg) == REAL_CST
10782 && !TREE_OVERFLOW (arg))
10783 {
10784 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
10785
10786 if (real_isfinite (ra))
10787 {
10788 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10789 const int prec = fmt->p;
10790 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
10791 tree result_s, result_c;
10792 int inexact;
10793 mpfr_t m, ms, mc;
10794
10795 mpfr_inits2 (prec, m, ms, mc, NULL);
10796 mpfr_from_real (m, ra, GMP_RNDN);
10797 mpfr_clear_flags ();
10798 inexact = mpfr_sin_cos (ms, mc, m, rnd);
10799 result_s = do_mpfr_ckconv (ms, type, inexact);
10800 result_c = do_mpfr_ckconv (mc, type, inexact);
10801 mpfr_clears (m, ms, mc, NULL);
10802 if (result_s && result_c)
10803 {
10804 /* If we are to return the result as a complex value, do so. */
10805 if (!arg_sinp && !arg_cosp)
10806 return build_complex (build_complex_type (type),
10807 result_c, result_s);
10808
10809 /* Dereference the sin/cos pointer arguments. */
10810 arg_sinp = build_fold_indirect_ref (arg_sinp);
10811 arg_cosp = build_fold_indirect_ref (arg_cosp);
10812 /* Proceed iff valid pointer types were passed in. */
10813 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
10814 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
10815 {
10816 /* Set the values. */
10817 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
10818 result_s);
10819 TREE_SIDE_EFFECTS (result_s) = 1;
10820 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
10821 result_c);
10822 TREE_SIDE_EFFECTS (result_c) = 1;
10823 /* Combine the assignments into a compound expr. */
10824 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10825 result_s, result_c));
10826 }
10827 }
10828 }
10829 }
10830 return result;
10831 }
10832
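/* Sketch of the two uses: a constant sincos (x, &s, &c) call folds to a
   COMPOUND_EXPR performing both stores,

     *&s = <sin constant>, *&c = <cos constant>

   while a cexpi-style caller passes NULL pointers and gets
   build_complex (<cos constant>, <sin constant>) back instead.  */
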
10833 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
10834 two-argument mpfr order N Bessel function FUNC on them and return
10835 the resulting value as a tree with type TYPE. The mpfr precision
10836 is set to the precision of TYPE. We assume that function FUNC
10837 returns zero if the result could be calculated exactly within the
10838 requested precision. */
10839 static tree
10840 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
10841 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
10842 const REAL_VALUE_TYPE *min, bool inclusive)
10843 {
10844 tree result = NULL_TREE;
10845
10846 STRIP_NOPS (arg1);
10847 STRIP_NOPS (arg2);
10848
10849 /* To proceed, MPFR must exactly represent the target floating point
10850 format, which only happens when the target base equals two. */
10851 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10852 && tree_fits_shwi_p (arg1)
10853 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
10854 {
10855 const HOST_WIDE_INT n = tree_to_shwi (arg1);
10856 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
10857
10858 if (n == (long) n
10859 && real_isfinite (ra)
10860 && (!min || real_compare (inclusive ? GE_EXPR : GT_EXPR, ra, min)))
10861 {
10862 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10863 const int prec = fmt->p;
10864 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
10865 int inexact;
10866 mpfr_t m;
10867
10868 mpfr_init2 (m, prec);
10869 mpfr_from_real (m, ra, GMP_RNDN);
10870 mpfr_clear_flags ();
10871 inexact = func (m, n, m, rnd);
10872 result = do_mpfr_ckconv (m, type, inexact);
10873 mpfr_clear (m);
10874 }
10875 }
10876
10877 return result;
10878 }
10879
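/* A sketch of typical callers: a constant __builtin_jn folds via

     do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, false);

   while yn passes &dconst0 as an exclusive lower bound, since yn
   requires a positive argument.  */
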
10880 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
10881 the pointer *(ARG_QUO) and return the result. The type is taken
10882 from the type of ARG0 and is used for setting the precision of the
10883 calculation and results. */
10884
10885 static tree
10886 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
10887 {
10888 tree const type = TREE_TYPE (arg0);
10889 tree result = NULL_TREE;
10890
10891 STRIP_NOPS (arg0);
10892 STRIP_NOPS (arg1);
10893
10894 /* To proceed, MPFR must exactly represent the target floating point
10895 format, which only happens when the target base equals two. */
10896 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10897 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
10898 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
10899 {
10900 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
10901 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
10902
10903 if (real_isfinite (ra0) && real_isfinite (ra1))
10904 {
10905 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10906 const int prec = fmt->p;
10907 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
10908 tree result_rem;
10909 long integer_quo;
10910 mpfr_t m0, m1;
10911
10912 mpfr_inits2 (prec, m0, m1, NULL);
10913 mpfr_from_real (m0, ra0, GMP_RNDN);
10914 mpfr_from_real (m1, ra1, GMP_RNDN);
10915 mpfr_clear_flags ();
10916 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
10917 /* Remquo is independent of the rounding mode, so pass
10918 inexact=0 to do_mpfr_ckconv(). */
10919 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
10920 mpfr_clears (m0, m1, NULL);
10921 if (result_rem)
10922 {
10923 /* MPFR calculates quo in the host's long, so it may
10924 return more bits in quo than the target int can hold
10925 if sizeof (host long) > sizeof (target int). This can
10926 happen even for native compilers in LP64 mode. In
10927 these cases, reduce the quo value modulo the largest
10928 number that the target int can hold while leaving one
10929 bit for the sign. */
10930 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
10931 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
10932
10933 /* Dereference the quo pointer argument. */
10934 arg_quo = build_fold_indirect_ref (arg_quo);
10935 /* Proceed iff a valid pointer type was passed in. */
10936 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
10937 {
10938 /* Set the value. */
10939 tree result_quo
10940 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
10941 build_int_cst (TREE_TYPE (arg_quo),
10942 integer_quo));
10943 TREE_SIDE_EFFECTS (result_quo) = 1;
10944 /* Combine the quo assignment with the rem. */
10945 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10946 result_quo, result_rem));
10947 }
10948 }
10949 }
10950 }
10951 return result;
10952 }
10953
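/* A worked example: remquo (5.0, 3.0, &q) with constant operands rounds
   5.0 / 3.0 to the nearest integer quotient 2, so *ARG_QUO is set to 2
   and the remainder 5.0 - 2 * 3.0 = -1.0 becomes the value of the
   COMPOUND_EXPR.  */
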
10954 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
10955 resulting value as a tree with type TYPE. The mpfr precision is
10956 set to the precision of TYPE. We assume that this mpfr function
10957 returns zero if the result could be calculated exactly within the
10958 requested precision. In addition, the integer pointer represented
10959 by ARG_SG will be dereferenced and set to the appropriate signgam
10960 (-1,1) value. */
10961
10962 static tree
10963 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
10964 {
10965 tree result = NULL_TREE;
10966
10967 STRIP_NOPS (arg);
10968
10969 /* To proceed, MPFR must exactly represent the target floating point
10970 format, which only happens when the target base equals two. Also
10971 verify ARG is a constant and that ARG_SG is an int pointer. */
10972 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10973 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
10974 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
10975 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
10976 {
10977 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
10978
10979 /* In addition to NaN and Inf, the argument cannot be zero or a
10980 negative integer. */
10981 if (real_isfinite (ra)
10982 && ra->cl != rvc_zero
10983 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
10984 {
10985 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10986 const int prec = fmt->p;
10987 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
10988 int inexact, sg;
10989 mpfr_t m;
10990 tree result_lg;
10991
10992 mpfr_init2 (m, prec);
10993 mpfr_from_real (m, ra, GMP_RNDN);
10994 mpfr_clear_flags ();
10995 inexact = mpfr_lgamma (m, &sg, m, rnd);
10996 result_lg = do_mpfr_ckconv (m, type, inexact);
10997 mpfr_clear (m);
10998 if (result_lg)
10999 {
11000 tree result_sg;
11001
11002 /* Dereference the arg_sg pointer argument. */
11003 arg_sg = build_fold_indirect_ref (arg_sg);
11004 /* Assign the signgam value into *arg_sg. */
11005 result_sg = fold_build2 (MODIFY_EXPR,
11006 TREE_TYPE (arg_sg), arg_sg,
11007 build_int_cst (TREE_TYPE (arg_sg), sg));
11008 TREE_SIDE_EFFECTS (result_sg) = 1;
11009 /* Combine the signgam assignment with the lgamma result. */
11010 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
11011 result_sg, result_lg));
11012 }
11013 }
11014 }
11015
11016 return result;
11017 }
11018
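/* A worked example: for lgamma_r (-2.5, &sg) with a constant argument,
   gamma (-2.5) is negative, so the fold assigns -1 to *ARG_SG and
   returns the log |gamma (-2.5)| constant, combined with the assignment
   in a COMPOUND_EXPR.  */
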
11019 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
11020 function FUNC on it and return the resulting value as a tree with
11021 type TYPE. The mpfr precision is set to the precision of TYPE. We
11022 assume that function FUNC returns zero if the result could be
11023 calculated exactly within the requested precision. */
11024
11025 static tree
11026 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
11027 {
11028 tree result = NULL_TREE;
11029
11030 STRIP_NOPS (arg);
11031
11032 /* To proceed, MPFR must exactly represent the target floating point
11033 format, which only happens when the target base equals two. */
11034 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
11035 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
11036 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
11037 {
11038 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
11039 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
11040
11041 if (real_isfinite (re) && real_isfinite (im))
11042 {
11043 const struct real_format *const fmt =
11044 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
11045 const int prec = fmt->p;
11046 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
11047 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
11048 int inexact;
11049 mpc_t m;
11050
11051 mpc_init2 (m, prec);
11052 mpfr_from_real (mpc_realref (m), re, rnd);
11053 mpfr_from_real (mpc_imagref (m), im, rnd);
11054 mpfr_clear_flags ();
11055 inexact = func (m, m, crnd);
11056 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
11057 mpc_clear (m);
11058 }
11059 }
11060
11061 return result;
11062 }
11063
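/* A sketch of a typical caller: a constant __builtin_csin folds via

     do_mpc_arg1 (arg, type, mpc_sin);

   mpc_sin matches the (mpc_ptr, mpc_srcptr, mpc_rnd_t) signature that
   FUNC requires.  */
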
11064 /* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
11065 mpc function FUNC on them and return the resulting value as a tree
11066 with type TYPE. The mpfr precision is set to the precision of
11067 TYPE. We assume that function FUNC returns zero if the result
11068 could be calculated exactly within the requested precision. If
11069 DO_NONFINITE is true, then fold expressions containing Inf or NaN
11070 in the arguments and/or results. */
11071
11072 tree
11073 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
11074 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
11075 {
11076 tree result = NULL_TREE;
11077
11078 STRIP_NOPS (arg0);
11079 STRIP_NOPS (arg1);
11080
11081 /* To proceed, MPFR must exactly represent the target floating point
11082 format, which only happens when the target base equals two. */
11083 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
11084 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
11085 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
11086 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
11087 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
11088 {
11089 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
11090 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
11091 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
11092 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
11093
11094 if (do_nonfinite
11095 || (real_isfinite (re0) && real_isfinite (im0)
11096 && real_isfinite (re1) && real_isfinite (im1)))
11097 {
11098 const struct real_format *const fmt =
11099 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
11100 const int prec = fmt->p;
11101 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
11102 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
11103 int inexact;
11104 mpc_t m0, m1;
11105
11106 mpc_init2 (m0, prec);
11107 mpc_init2 (m1, prec);
11108 mpfr_from_real (mpc_realref (m0), re0, rnd);
11109 mpfr_from_real (mpc_imagref (m0), im0, rnd);
11110 mpfr_from_real (mpc_realref (m1), re1, rnd);
11111 mpfr_from_real (mpc_imagref (m1), im1, rnd);
11112 mpfr_clear_flags ();
11113 inexact = func (m0, m0, m1, crnd);
11114 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
11115 mpc_clear (m0);
11116 mpc_clear (m1);
11117 }
11118 }
11119
11120 return result;
11121 }
11122
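/* A sketch of a typical caller: a constant __builtin_cpow folds via

     do_mpc_arg2 (arg0, arg1, type, flag_unsafe_math_optimizations,
                  mpc_pow);

   passing flag_unsafe_math_optimizations as DO_NONFINITE, so Inf/NaN
   operands are folded only when the user allowed unsafe math.  */
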
11123 /* A wrapper function for builtin folding that prevents warnings for
11124 "statement without effect" and the like, caused by removing the
11125 call node before the warning is generated. */
11126
11127 tree
11128 fold_call_stmt (gcall *stmt, bool ignore)
11129 {
11130 tree ret = NULL_TREE;
11131 tree fndecl = gimple_call_fndecl (stmt);
11132 location_t loc = gimple_location (stmt);
11133 if (fndecl
11134 && TREE_CODE (fndecl) == FUNCTION_DECL
11135 && DECL_BUILT_IN (fndecl)
11136 && !gimple_call_va_arg_pack_p (stmt))
11137 {
11138 int nargs = gimple_call_num_args (stmt);
11139 tree *args = (nargs > 0
11140 ? gimple_call_arg_ptr (stmt, 0)
11141 : &error_mark_node);
11142
11143 if (avoid_folding_inline_builtin (fndecl))
11144 return NULL_TREE;
11145 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11146 {
11147 return targetm.fold_builtin (fndecl, nargs, args, ignore);
11148 }
11149 else
11150 {
11151 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
11152 if (ret)
11153 {
11154 /* Propagate location information from original call to
11155 expansion of builtin. Otherwise things like
11156 maybe_emit_chk_warning, which operate on the expansion
11157 of a builtin, will use the wrong location information. */
11158 if (gimple_has_location (stmt))
11159 {
11160 tree realret = ret;
11161 if (TREE_CODE (ret) == NOP_EXPR)
11162 realret = TREE_OPERAND (ret, 0);
11163 if (CAN_HAVE_LOCATION_P (realret)
11164 && !EXPR_HAS_LOCATION (realret))
11165 SET_EXPR_LOCATION (realret, loc);
11166 return realret;
11167 }
11168 return ret;
11169 }
11170 }
11171 }
11172 return NULL_TREE;
11173 }
11174
11175 /* Look up the function in builtin_decl that corresponds to DECL
11176 and set ASMSPEC as its user assembler name. DECL must be a
11177 function decl that declares a builtin. */
11178
11179 void
11180 set_builtin_user_assembler_name (tree decl, const char *asmspec)
11181 {
11182 tree builtin;
11183 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
11184 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
11185 && asmspec != 0);
11186
11187 builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
11188 set_user_assembler_name (builtin, asmspec);
11189 switch (DECL_FUNCTION_CODE (decl))
11190 {
11191 case BUILT_IN_MEMCPY:
11192 init_block_move_fn (asmspec);
11193 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
11194 break;
11195 case BUILT_IN_MEMSET:
11196 init_block_clear_fn (asmspec);
11197 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
11198 break;
11199 case BUILT_IN_MEMMOVE:
11200 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
11201 break;
11202 case BUILT_IN_MEMCMP:
11203 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
11204 break;
11205 case BUILT_IN_ABORT:
11206 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
11207 break;
11208 case BUILT_IN_FFS:
11209 if (INT_TYPE_SIZE < BITS_PER_WORD)
11210 {
11211 set_user_assembler_libfunc ("ffs", asmspec);
11212 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
11213 MODE_INT, 0), "ffs");
11214 }
11215 break;
11216 default:
11217 break;
11218 }
11219 }
11220
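/* A hypothetical example: given the user declaration

     void *memcpy (void *, const void *, __SIZE_TYPE__)
       __asm__ ("__my_memcpy");

   the front end calls this function, and both the builtin decl and the
   block-move libfunc then emit calls to __my_memcpy instead of memcpy.  */
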
11221 /* Return true if DECL is a builtin that expands to a constant or similarly
11222 simple code. */
11223 bool
11224 is_simple_builtin (tree decl)
11225 {
11226 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
11227 switch (DECL_FUNCTION_CODE (decl))
11228 {
11229 /* Builtins that expand to constants. */
11230 case BUILT_IN_CONSTANT_P:
11231 case BUILT_IN_EXPECT:
11232 case BUILT_IN_OBJECT_SIZE:
11233 case BUILT_IN_UNREACHABLE:
11234 /* Simple register moves or loads from stack. */
11235 case BUILT_IN_ASSUME_ALIGNED:
11236 case BUILT_IN_RETURN_ADDRESS:
11237 case BUILT_IN_EXTRACT_RETURN_ADDR:
11238 case BUILT_IN_FROB_RETURN_ADDR:
11239 case BUILT_IN_RETURN:
11240 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
11241 case BUILT_IN_FRAME_ADDRESS:
11242 case BUILT_IN_VA_END:
11243 case BUILT_IN_STACK_SAVE:
11244 case BUILT_IN_STACK_RESTORE:
11245 /* Exception state returns or moves registers around. */
11246 case BUILT_IN_EH_FILTER:
11247 case BUILT_IN_EH_POINTER:
11248 case BUILT_IN_EH_COPY_VALUES:
11249 return true;
11250
11251 default:
11252 return false;
11253 }
11254
11255 return false;
11256 }
11257
11258 /* Return true if DECL is a builtin that is not expensive, i.e., it is
11259 most probably expanded inline into reasonably simple code. This is a
11260 superset of is_simple_builtin. */
11261 bool
11262 is_inexpensive_builtin (tree decl)
11263 {
11264 if (!decl)
11265 return false;
11266 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
11267 return true;
11268 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
11269 switch (DECL_FUNCTION_CODE (decl))
11270 {
11271 case BUILT_IN_ABS:
11272 case BUILT_IN_ALLOCA:
11273 case BUILT_IN_ALLOCA_WITH_ALIGN:
11274 case BUILT_IN_BSWAP16:
11275 case BUILT_IN_BSWAP32:
11276 case BUILT_IN_BSWAP64:
11277 case BUILT_IN_CLZ:
11278 case BUILT_IN_CLZIMAX:
11279 case BUILT_IN_CLZL:
11280 case BUILT_IN_CLZLL:
11281 case BUILT_IN_CTZ:
11282 case BUILT_IN_CTZIMAX:
11283 case BUILT_IN_CTZL:
11284 case BUILT_IN_CTZLL:
11285 case BUILT_IN_FFS:
11286 case BUILT_IN_FFSIMAX:
11287 case BUILT_IN_FFSL:
11288 case BUILT_IN_FFSLL:
11289 case BUILT_IN_IMAXABS:
11290 case BUILT_IN_FINITE:
11291 case BUILT_IN_FINITEF:
11292 case BUILT_IN_FINITEL:
11293 case BUILT_IN_FINITED32:
11294 case BUILT_IN_FINITED64:
11295 case BUILT_IN_FINITED128:
11296 case BUILT_IN_FPCLASSIFY:
11297 case BUILT_IN_ISFINITE:
11298 case BUILT_IN_ISINF_SIGN:
11299 case BUILT_IN_ISINF:
11300 case BUILT_IN_ISINFF:
11301 case BUILT_IN_ISINFL:
11302 case BUILT_IN_ISINFD32:
11303 case BUILT_IN_ISINFD64:
11304 case BUILT_IN_ISINFD128:
11305 case BUILT_IN_ISNAN:
11306 case BUILT_IN_ISNANF:
11307 case BUILT_IN_ISNANL:
11308 case BUILT_IN_ISNAND32:
11309 case BUILT_IN_ISNAND64:
11310 case BUILT_IN_ISNAND128:
11311 case BUILT_IN_ISNORMAL:
11312 case BUILT_IN_ISGREATER:
11313 case BUILT_IN_ISGREATEREQUAL:
11314 case BUILT_IN_ISLESS:
11315 case BUILT_IN_ISLESSEQUAL:
11316 case BUILT_IN_ISLESSGREATER:
11317 case BUILT_IN_ISUNORDERED:
11318 case BUILT_IN_VA_ARG_PACK:
11319 case BUILT_IN_VA_ARG_PACK_LEN:
11320 case BUILT_IN_VA_COPY:
11321 case BUILT_IN_TRAP:
11322 case BUILT_IN_SAVEREGS:
11323 case BUILT_IN_POPCOUNTL:
11324 case BUILT_IN_POPCOUNTLL:
11325 case BUILT_IN_POPCOUNTIMAX:
11326 case BUILT_IN_POPCOUNT:
11327 case BUILT_IN_PARITYL:
11328 case BUILT_IN_PARITYLL:
11329 case BUILT_IN_PARITYIMAX:
11330 case BUILT_IN_PARITY:
11331 case BUILT_IN_LABS:
11332 case BUILT_IN_LLABS:
11333 case BUILT_IN_PREFETCH:
11334 case BUILT_IN_ACC_ON_DEVICE:
11335 return true;
11336
11337 default:
11338 return is_simple_builtin (decl);
11339 }
11340
11341 return false;
11342 }