/* Expand builtin functions.
   Copyright (C) 1988-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Legacy warning!  Please add no further builtin simplifications here
   (apart from pure constant folding) - builtin simplifications should go
   to match.pd or gimple-fold.c instead.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "predict.h"
#include "tree.h"
#include "gimple.h"
#include "rtl.h"
#include "alias.h"
#include "fold-const.h"
#include "stringpool.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "tree-object-size.h"
#include "realmpfr.h"
#include "cfgrtl.h"
#include "internal-fn.h"
#include "flags.h"
#include "regs.h"
#include "except.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "emit-rtl.h"
#include "stmt.h"
#include "expr.h"
#include "insn-codes.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"
#include "tm_p.h"
#include "target.h"
#include "langhooks.h"
#include "tree-ssanames.h"
#include "tree-dfa.h"
#include "value-prof.h"
#include "diagnostic-core.h"
#include "builtins.h"
#include "asan.h"
#include "cilk.h"
#include "cgraph.h"
#include "tree-chkp.h"
#include "rtl-chkp.h"


static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));

struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN

/* Set up an array of builtin_info_type, making sure each element decl is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;

static rtx c_readstr (const char *, machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static void expand_errno_check (tree, rtx);
static rtx expand_builtin_mathfn (tree, rtx, rtx);
static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
static rtx expand_builtin_memcpy_args (tree, tree, tree, rtx, tree);
static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
                                        machine_mode, int, tree);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree, bool);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree fold_builtin_nan (tree, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static bool integer_valued_real_p (tree);
static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
static tree fold_builtin_tan (tree, tree);
static tree fold_builtin_trunc (location_t, tree, tree);
static tree fold_builtin_floor (location_t, tree, tree);
static tree fold_builtin_ceil (location_t, tree, tree);
static tree fold_builtin_round (location_t, tree, tree);
static tree fold_builtin_int_roundingfn (location_t, tree, tree);
static tree fold_builtin_bitop (tree, tree);
static tree fold_builtin_strchr (location_t, tree, tree, tree);
static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_strcmp (location_t, tree, tree);
static tree fold_builtin_strncmp (location_t, tree, tree, tree);
static tree fold_builtin_signbit (location_t, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree,
                                        enum tree_code, enum tree_code);
static tree fold_builtin_0 (location_t, tree);
static tree fold_builtin_1 (location_t, tree, tree);
static tree fold_builtin_2 (location_t, tree, tree, tree);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree*, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strstr (location_t, tree, tree, tree);
static tree fold_builtin_strrchr (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
                                      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
                          const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_arg2 (tree, tree, tree,
                          int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_arg3 (tree, tree, tree, tree,
                          int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_sincos (tree, tree, tree);
static tree do_mpfr_bessel_n (tree, tree, tree,
                              int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
                              const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);

/* Return true if NAME starts with __builtin_, __sync_ or __atomic_, or
   (when Cilk Plus is enabled) is one of the Cilk runtime helper names.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  if (flag_cilkplus
      && (!strcmp (name, "__cilkrts_detach")
          || !strcmp (name, "__cilkrts_pop_frame")))
    return true;
  return false;
}
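
/* Illustrative classification (a sketch, not used by the compiler):
   the predicate above answers

     is_builtin_name ("__builtin_memcpy")      -> true
     is_builtin_name ("__sync_fetch_and_add")  -> true
     is_builtin_name ("__atomic_load_n")       -> true
     is_builtin_name ("memcpy")                -> false

   and accepts "__cilkrts_detach" / "__cilkrts_pop_frame" only when
   flag_cilkplus is set.  */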

/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

static bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}

/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and
   N in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
                        unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                             &mode, &unsignedp, &volatilep, true);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
         alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
         allows the low bit to be used as a virtual bit, we know
         that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
        align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
        align = (unsigned) CONSTANT_ALIGNMENT (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
    {
      align = TYPE_ALIGN (TREE_TYPE (exp));
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
           || TREE_CODE (exp) == MEM_REF
           || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
        {
          ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
          ptr_bitmask *= BITS_PER_UNIT;
          align = ptr_bitmask & -ptr_bitmask;
          addr = TREE_OPERAND (addr, 0);
        }

      known_alignment
        = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
         has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
        {
          if (TMR_INDEX (exp))
            {
              unsigned HOST_WIDE_INT step = 1;
              if (TMR_STEP (exp))
                step = TREE_INT_CST_LOW (TMR_STEP (exp));
              align = MIN (align, (step & -step) * BITS_PER_UNIT);
            }
          if (TMR_INDEX2 (exp))
            align = BITS_PER_UNIT;
          known_alignment = false;
        }

      /* When EXP is an actual memory reference then we can use
         TYPE_ALIGN of a pointer indirection to derive alignment.
         Do so only if get_pointer_alignment_1 did not reveal absolute
         alignment knowledge and if using that alignment would
         improve the situation.  */
      if (!addr_p && !known_alignment
          && TYPE_ALIGN (TREE_TYPE (exp)) > align)
        align = TYPE_ALIGN (TREE_TYPE (exp));
      else
        {
          /* Else adjust bitpos accordingly.  */
          bitpos += ptr_bitpos;
          if (TREE_CODE (exp) == MEM_REF
              || TREE_CODE (exp) == TARGET_MEM_REF)
            bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
        }
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
         wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
        align = (unsigned) CONSTANT_ALIGNMENT (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
        {
          unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
          if (inner)
            align = MIN (align, inner);
        }
    }

  *alignp = align;
  *bitposp = bitpos & (*alignp - 1);
  return known_alignment;
}
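
/* A worked instance of the contract above (illustrative only): for a
   field at byte offset 2 inside a 16-byte-aligned object, this sets
   *ALIGNP to 128 and *BITPOSP to 16, i.e. the address in bits is known
   to be 16 more than a multiple of 128.  */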

/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Otherwise return false and store BITS_PER_UNIT to *ALIGNP and any
   bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
                        unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}

/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);
  return align;
}

/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Return false if the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
                         unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
                                   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
                                          &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
        bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
        {
          unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
          if (trailing_zeros < HOST_BITS_PER_INT)
            {
              unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
              if (inner)
                align = MIN (align, inner);
            }
        }
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
           && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
        {
          *bitposp = ptr_misalign * BITS_PER_UNIT;
          *alignp = ptr_align * BITS_PER_UNIT;
          /* We cannot really tell whether this result is an approximation.  */
          return true;
        }
      else
        {
          *bitposp = 0;
          *alignp = BITS_PER_UNIT;
          return false;
        }
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
                  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}
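
/* Minimal usage sketch for the function above (hypothetical caller PTR,
   not part of this file):

     unsigned int align;
     unsigned HOST_WIDE_INT misalign;
     bool exact = get_pointer_alignment_1 (ptr, &align, &misalign);

   On return the pointer is known to equal MISALIGN / BITS_PER_UNIT plus
   a multiple of ALIGN / BITS_PER_UNIT; EXACT says whether that is exact
   knowledge or merely a conservative approximation.  */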

/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);

  return align;
}

/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
        return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  loc = EXPR_LOC_OR_LOC (src, input_location);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
         compute the offset to the following null if we don't know where to
         start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
        if (ptr[i] == 0)
          return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
         has no internal zero bytes.  We can assume that the offset falls
         within the bounds of the string; otherwise, the programmer deserves
         what he gets.  Subtract the offset from the length of the string,
         and return that.  This would perhaps not be valid if we were dealing
         with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! tree_fits_shwi_p (offset_node))
    offset = -1;
  else
    offset = tree_to_shwi (offset_node);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
          && !TREE_NO_WARNING (src))
        {
          warning_at (loc, 0, "offset outside bounds of constant string");
          TREE_NO_WARNING (src) = 1;
        }
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
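
/* Behaviour of c_strlen by example (illustrative only): for the tree
   form of "hello" + 2 it returns ssize_int (3); for "foo\0bar" plus a
   non-constant offset it returns NULL_TREE, since the interior zero
   byte makes the distance to the terminating null unknowable; for a
   constant offset past the array it warns and returns NULL_TREE.  */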

/* Return a char pointer for a C string if it is a string constant
   or sum of string constant and integer constant.  */

const char *
c_getstr (tree src)
{
  tree offset_node;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;

  if (offset_node == 0)
    return TREE_STRING_POINTER (src);
  else if (!tree_fits_uhwi_p (offset_node)
           || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
    return 0;

  return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
}
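
/* For example (illustrative only): given the tree for "abc" + 1,
   c_getstr returns a pointer to the host string "bc"; a non-constant
   offset, or one beyond the trailing null, yields 0.  */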

/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, machine_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
        j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
          && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
        j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
        ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
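
/* Concrete byte placement, assuming 8-bit units and a 32-bit SImode
   (illustrative only): on a little-endian target
   c_readstr ("abcd", SImode) yields the constant 0x64636261, with
   str[0] == 'a' (0x61) in the least significant byte; a big-endian
   target yields 0x61626364.  */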

/* Cast a target constant CST to target CHAR and, if that value fits into
   the host char type, return zero and put that value into the variable
   pointed to by P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
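
/* Illustrative behaviour: the value is truncated to the target char
   first, so with 8-bit target chars an INTEGER_CST of 0x141 stores
   0x41 ('A') in *P and returns 0.  A nonzero return means CST was not
   an INTEGER_CST, or the target char value cannot be represented in a
   host char.  */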

/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
          && (TREE_CODE (exp) == PARM_DECL
              || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}

/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
  if (tem == NULL_RTX)
    {
      /* For a zero count with __builtin_return_address, we don't care what
         frame address we return, because target-specific definitions will
         override us.  Therefore frame pointer elimination is OK, and using
         the soft frame pointer is OK.

         For a nonzero count, or a zero count with __builtin_frame_address,
         we require a stable offset from the current frame pointer to the
         previous one, so we must use the hard frame pointer, and
         we must disable frame pointer elimination.  */
      if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
        tem = frame_pointer_rtx;
      else
        {
          tem = hard_frame_pointer_rtx;

          /* Tell reload not to eliminate the frame pointer.  */
          crtl->accesses_prior_frames = 1;
        }
    }

  if (count > 0)
    SETUP_FRAME_ADDRESSES ();

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
         frame address points to, unless otherwise specified.  */
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return FRAME_ADDR_RTX (tem);

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
                        plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
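
/* Source-level view of the expander above (illustrative only):

     void *ra = __builtin_return_address (0);
     void *fp = __builtin_frame_address (1);

   A count of 0 names the current frame; each increment follows the
   dynamic chain one frame further up.  */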

/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
                                           GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
                  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
                            plus_constant (Pmode, buf_addr,
                                           2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}
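
/* Layout of the buffer initialized above, in Pmode-word slots
   (illustrative only):

     word 0: frame value (targetm.builtin_setjmp_frame_value)
     word 1: address of RECEIVER_LABEL
     word 2: stack save area (sa_mode)

   The remaining words of the five-word buffer are left for
   machine-dependent use.  */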

/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
  if (! targetm.have_nonlocal_goto ())
    {
      /* First adjust our frame pointer to its actual value.  It was
         previously set to the start of the virtual area corresponding to
         the stacked variables when we branched here and now needs to be
         adjusted to the actual hardware fp value.

         Assignments to virtual registers are converted by
         instantiate_virtual_regs into the corresponding assignment
         to the underlying register (fp in this case) that makes
         the original assignment true.
         So the following insn will actually be decrementing fp by
         STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
         Mark it used (so that the previous assignment remains live once
         the frame pointer is eliminated) and clobbered (to represent the
         implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

  if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
         frame pointer, we don't need to restore it.  We assume here
         that if such an elimination is present, it can always be used.
         This is the case on all known machines; if we don't make this
         assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
        if (elim_regs[i].from == ARG_POINTER_REGNUM
            && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
          break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
        {
          /* Now restore our arg pointer from the address at which it
             was saved in our stack frame.  */
          emit_move_insn (crtl->args.internal_arg_pointer,
                          copy_to_reg (get_arg_pointer_save_area ()));
        }
    }

  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());
  else
    { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}

/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realignment if longjmp is expanded in the
     current function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
                                               GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
                                                   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
         from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
        /* We have to pass a value to the nonlocal_goto pattern that will
           get copied into the static_chain pointer, but it does not matter
           what that value is, because builtin_setjmp does not use it.  */
        emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
        {
          lab = copy_to_reg (lab);

          emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
          emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

          emit_move_insn (hard_frame_pointer_rtx, fp);
          emit_stack_restore (SAVE_NONLOCAL, stack);

          emit_use (hard_frame_pointer_rtx);
          emit_use (stack_pointer_rtx);
          emit_indirect_jump (lab);
        }
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
        {
          add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
          break;
        }
      else if (CALL_P (insn))
        break;
    }
}
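
/* Source-level picture of the setjmp/longjmp pair (illustrative only;
   these builtins are intended for internal exception handling use):

     static void *buf[5];

     void thrower (void) { __builtin_longjmp (buf, 1); }

     int catcher (void)
     {
       if (__builtin_setjmp (buf) == 0)
         thrower ();
       return 0;
     }

   The second argument to __builtin_longjmp must be the constant 1, and,
   as noted above, the jump must not target a __builtin_setjmp in the
   same function.  */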

/* Return true if the CALL_EXPR argument iterator ITER has more
   arguments to visit.  */

static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = false;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  do
    {
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
        {
        case 0:
          /* This signifies an ellipsis, any further arguments are all ok.  */
          res = true;
          goto end;
        case VOID_TYPE:
          /* This signifies an endlink, if no arguments remain, return
             true, otherwise return false.  */
          res = !more_const_call_expr_args_p (&iter);
          goto end;
        default:
          /* If no parameters remain or the parameter's code does not
             match the specified code, return false.  Otherwise continue
             checking any remaining arguments.  */
          arg = next_const_call_expr_arg (&iter);
          if (!validate_arg (arg, code))
            goto end;
          break;
        }
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  return res;
}
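
/* Typical invocations, as used later in this file (illustrative):

     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
       requires exactly two pointer arguments;
     validate_arglist (exp, POINTER_TYPE, 0)
       requires a leading pointer, then anything (trailing ellipsis).  */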

/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
                      plus_constant (Pmode, r_save_area,
                                     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
         not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
         conservatively assume that the target function makes use of it.
         The prologue of functions with nonlocal gotos must therefore
         initialize the GP register to the appropriate value, and we
         must then make sure that this value is live at the point
         of the jump.  (Note that this doesn't necessarily apply
         to targets with a nonlocal_goto pattern; they are free
         to implement it in their own way.  Note also that this is
         a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
          && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
        emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
        {
          add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
          break;
        }
      else if (CALL_P (insn))
        break;
    }

  return const0_rtx;
}

/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
                   memory_address
                   (sa_mode,
                    plus_constant (Pmode, buf_addr,
                                   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}

/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
               " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

  if (targetm.have_prefetch ())
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
        return;
    }

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
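
/* Corresponding source forms (illustrative only):

     __builtin_prefetch (p);        read prefetch, locality 3
     __builtin_prefetch (p, 1, 0);  write prefetch, no temporal locality

   Both optional operands must be integer constants, as enforced
   above.  */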

/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not a resolved SAVE_EXPR, MEM_ATTRS can still be derived
     from its expression; for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
         && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
                     build_array_type (char_type_node,
                                       build_range_type (sizetype,
                                                         size_one_node, len)),
                     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
           && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
                                                     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
                         build_array_type (char_type_node,
                                           build_range_type (sizetype,
                                                             size_zero_node,
                                                             NULL)),
                         exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}
\f
/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
         passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
        size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if (FUNCTION_ARG_REGNO_P (regno))
          {
            mode = targetm.calls.get_raw_arg_mode (regno);

            gcc_assert (mode != VOIDmode);

            align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
            if (size % align != 0)
              size = CEIL (size, align) * align;
            size += GET_MODE_SIZE (mode);
            apply_args_mode[regno] = mode;
          }
        else
          {
            apply_args_mode[regno] = VOIDmode;
          }
    }
  return size;
}
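
/* Concrete block shape on a hypothetical 64-bit target whose struct
   value address occupies a visible slot and which passes arguments in
   two 8-byte registers (illustrative only):

     offset  0: incoming arg pointer   (Pmode)
     offset  8: struct value address
     offset 16: argument register 0
     offset 24: argument register 1

   giving apply_args_size () == 32.  */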
1378
1379 /* Return the size required for the block returned by __builtin_apply,
1380 and initialize apply_result_mode. */
1381
1382 static int
1383 apply_result_size (void)
1384 {
1385 static int size = -1;
1386 int align, regno;
1387 machine_mode mode;
1388
1389 /* The values computed by this function never change. */
1390 if (size < 0)
1391 {
1392 size = 0;
1393
1394 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1395 if (targetm.calls.function_value_regno_p (regno))
1396 {
1397 mode = targetm.calls.get_raw_result_mode (regno);
1398
1399 gcc_assert (mode != VOIDmode);
1400
1401 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1402 if (size % align != 0)
1403 size = CEIL (size, align) * align;
1404 size += GET_MODE_SIZE (mode);
1405 apply_result_mode[regno] = mode;
1406 }
1407 else
1408 apply_result_mode[regno] = VOIDmode;
1409
1410 /* Allow targets that use untyped_call and untyped_return to override
1411 the size so that machine-specific information can be stored here. */
1412 #ifdef APPLY_RESULT_SIZE
1413 size = APPLY_RESULT_SIZE;
1414 #endif
1415 }
1416 return size;
1417 }
1418
1419 /* Create a vector describing the result block RESULT. If SAVEP is true,
1420 the result block is used to save the values; otherwise it is used to
1421 restore the values. */
1422
1423 static rtx
1424 result_vector (int savep, rtx result)
1425 {
1426 int regno, size, align, nelts;
1427 machine_mode mode;
1428 rtx reg, mem;
1429 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1430
1431 size = nelts = 0;
1432 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1433 if ((mode = apply_result_mode[regno]) != VOIDmode)
1434 {
1435 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1436 if (size % align != 0)
1437 size = CEIL (size, align) * align;
1438 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1439 mem = adjust_address (result, mode, size);
1440 savevec[nelts++] = (savep
1441 ? gen_rtx_SET (mem, reg)
1442 : gen_rtx_SET (reg, mem));
1443 size += GET_MODE_SIZE (mode);
1444 }
1445 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1446 }
1447
1448 /* Save the state required to perform an untyped call with the same
1449 arguments as were passed to the current function. */
1450
1451 static rtx
1452 expand_builtin_apply_args_1 (void)
1453 {
1454 rtx registers, tem;
1455 int size, align, regno;
1456 machine_mode mode;
1457 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1458
1459 /* Create a block where the arg-pointer, structure value address,
1460 and argument registers can be saved. */
1461 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1462
1463 /* Walk past the arg-pointer and structure value address. */
1464 size = GET_MODE_SIZE (Pmode);
1465 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1466 size += GET_MODE_SIZE (Pmode);
1467
1468 /* Save each register used in calling a function to the block. */
1469 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1470 if ((mode = apply_args_mode[regno]) != VOIDmode)
1471 {
1472 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1473 if (size % align != 0)
1474 size = CEIL (size, align) * align;
1475
1476 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1477
1478 emit_move_insn (adjust_address (registers, mode, size), tem);
1479 size += GET_MODE_SIZE (mode);
1480 }
1481
1482 /* Save the arg pointer to the block. */
1483 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1484 /* We need the pointer as the caller actually passed them to us, not
1485 as we might have pretended they were passed. Make sure it's a valid
1486 operand, as emit_move_insn isn't expected to handle a PLUS. */
1487 if (STACK_GROWS_DOWNWARD)
1488 tem
1489 = force_operand (plus_constant (Pmode, tem,
1490 crtl->args.pretend_args_size),
1491 NULL_RTX);
1492 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1493
1494 size = GET_MODE_SIZE (Pmode);
1495
1496 /* Save the structure value address unless this is passed as an
1497 "invisible" first argument. */
1498 if (struct_incoming_value)
1499 {
1500 emit_move_insn (adjust_address (registers, Pmode, size),
1501 copy_to_reg (struct_incoming_value));
1502 size += GET_MODE_SIZE (Pmode);
1503 }
1504
1505 /* Return the address of the block. */
1506 return copy_addr_to_reg (XEXP (registers, 0));
1507 }
1508
1509 /* __builtin_apply_args returns block of memory allocated on
1510 the stack into which is stored the arg pointer, structure
1511 value address, static chain, and all the registers that might
1512 possibly be used in performing a function call. The code is
1513 moved to the start of the function so the incoming values are
1514 saved. */
1515
1516 static rtx
1517 expand_builtin_apply_args (void)
1518 {
1519 /* Don't do __builtin_apply_args more than once in a function.
1520 Save the result of the first call and reuse it. */
1521 if (apply_args_value != 0)
1522 return apply_args_value;
1523 {
1524 /* When this function is called, it means that registers must be
1525 saved on entry to this function. So we migrate the
1526 call to the first insn of this function. */
1527 rtx temp;
1528
1529 start_sequence ();
1530 temp = expand_builtin_apply_args_1 ();
1531 rtx_insn *seq = get_insns ();
1532 end_sequence ();
1533
1534 apply_args_value = temp;
1535
1536 /* Put the insns after the NOTE that starts the function.
1537 If this is inside a start_sequence, make the outer-level insn
1538 chain current, so the code is placed at the start of the
1539 function. If internal_arg_pointer is a non-virtual pseudo,
1540 it needs to be placed after the function that initializes
1541 that pseudo. */
1542 push_topmost_sequence ();
1543 if (REG_P (crtl->args.internal_arg_pointer)
1544 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1545 emit_insn_before (seq, parm_birth_insn);
1546 else
1547 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1548 pop_topmost_sequence ();
1549 return temp;
1550 }
1551 }
1552
1553 /* Perform an untyped call and save the state required to perform an
1554 untyped return of whatever value was returned by the given function. */
1555
1556 static rtx
1557 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1558 {
1559 int size, align, regno;
1560 machine_mode mode;
1561 rtx incoming_args, result, reg, dest, src;
1562 rtx_call_insn *call_insn;
1563 rtx old_stack_level = 0;
1564 rtx call_fusage = 0;
1565 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1566
1567 arguments = convert_memory_address (Pmode, arguments);
1568
1569 /* Create a block where the return registers can be saved. */
1570 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1571
1572 /* Fetch the arg pointer from the ARGUMENTS block. */
1573 incoming_args = gen_reg_rtx (Pmode);
1574 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1575 if (!STACK_GROWS_DOWNWARD)
1576 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1577 incoming_args, 0, OPTAB_LIB_WIDEN);
1578
1579 /* Push a new argument block and copy the arguments. Do not allow
1580 the (potential) memcpy call below to interfere with our stack
1581 manipulations. */
1582 do_pending_stack_adjust ();
1583 NO_DEFER_POP;
1584
1585 /* Save the stack with nonlocal if available. */
1586 if (targetm.have_save_stack_nonlocal ())
1587 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1588 else
1589 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1590
1591 /* Allocate a block of memory onto the stack and copy the memory
1592 arguments to the outgoing arguments address. We can pass TRUE
1593 as the 4th argument because we just saved the stack pointer
1594 and will restore it right after the call. */
1595 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1596
1597 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1598 may have already set current_function_calls_alloca to true.
1599 current_function_calls_alloca won't be set if argsize is zero,
1600 so we have to guarantee need_drap is true here. */
1601 if (SUPPORTS_STACK_ALIGNMENT)
1602 crtl->need_drap = true;
1603
1604 dest = virtual_outgoing_args_rtx;
1605 if (!STACK_GROWS_DOWNWARD)
1606 {
1607 if (CONST_INT_P (argsize))
1608 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1609 else
1610 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1611 }
1612 dest = gen_rtx_MEM (BLKmode, dest);
1613 set_mem_align (dest, PARM_BOUNDARY);
1614 src = gen_rtx_MEM (BLKmode, incoming_args);
1615 set_mem_align (src, PARM_BOUNDARY);
1616 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1617
1618 /* Refer to the argument block. */
1619 apply_args_size ();
1620 arguments = gen_rtx_MEM (BLKmode, arguments);
1621 set_mem_align (arguments, PARM_BOUNDARY);
1622
1623 /* Walk past the arg-pointer and structure value address. */
1624 size = GET_MODE_SIZE (Pmode);
1625 if (struct_value)
1626 size += GET_MODE_SIZE (Pmode);
1627
1628 /* Restore each of the registers previously saved. Make USE insns
1629 for each of these registers for use in making the call. */
1630 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1631 if ((mode = apply_args_mode[regno]) != VOIDmode)
1632 {
1633 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1634 if (size % align != 0)
1635 size = CEIL (size, align) * align;
1636 reg = gen_rtx_REG (mode, regno);
1637 emit_move_insn (reg, adjust_address (arguments, mode, size));
1638 use_reg (&call_fusage, reg);
1639 size += GET_MODE_SIZE (mode);
1640 }
1641
1642 /* Restore the structure value address unless this is passed as an
1643 "invisible" first argument. */
1644 size = GET_MODE_SIZE (Pmode);
1645 if (struct_value)
1646 {
1647 rtx value = gen_reg_rtx (Pmode);
1648 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1649 emit_move_insn (struct_value, value);
1650 if (REG_P (struct_value))
1651 use_reg (&call_fusage, struct_value);
1652 size += GET_MODE_SIZE (Pmode);
1653 }
1654
1655 /* All arguments and registers used for the call are set up by now! */
1656 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1657
1658 /* Ensure the address is valid. A SYMBOL_REF is already valid, and we
1659 don't want to force it into a register as an optimization, because
1660 prepare_call_address already did that if it should be done. */
1661 if (GET_CODE (function) != SYMBOL_REF)
1662 function = memory_address (FUNCTION_MODE, function);
1663
1664 /* Generate the actual call instruction and save the return value. */
1665 if (targetm.have_untyped_call ())
1666 {
1667 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1668 emit_call_insn (targetm.gen_untyped_call (mem, result,
1669 result_vector (1, result)));
1670 }
1671 else if (targetm.have_call_value ())
1672 {
1673 rtx valreg = 0;
1674
1675 /* Locate the unique return register. It is not possible to
1676 express a call that sets more than one return register using
1677 call_value; use untyped_call for that. In fact, untyped_call
1678 only needs to save the return registers in the given block. */
1679 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1680 if ((mode = apply_result_mode[regno]) != VOIDmode)
1681 {
1682 gcc_assert (!valreg); /* have_untyped_call required. */
1683
1684 valreg = gen_rtx_REG (mode, regno);
1685 }
1686
1687 emit_insn (targetm.gen_call_value (valreg,
1688 gen_rtx_MEM (FUNCTION_MODE, function),
1689 const0_rtx, NULL_RTX, const0_rtx));
1690
1691 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1692 }
1693 else
1694 gcc_unreachable ();
1695
1696 /* Find the CALL insn we just emitted, and attach the register usage
1697 information. */
1698 call_insn = last_call_insn ();
1699 add_function_usage_to (call_insn, call_fusage);
1700
1701 /* Restore the stack. */
1702 if (targetm.have_save_stack_nonlocal ())
1703 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1704 else
1705 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1706 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1707
1708 OK_DEFER_POP;
1709
1710 /* Return the address of the result block. */
1711 result = copy_addr_to_reg (XEXP (result, 0));
1712 return convert_memory_address (ptr_mode, result);
1713 }
1714
1715 /* Perform an untyped return. */
1716
1717 static void
1718 expand_builtin_return (rtx result)
1719 {
1720 int size, align, regno;
1721 machine_mode mode;
1722 rtx reg;
1723 rtx_insn *call_fusage = 0;
1724
1725 result = convert_memory_address (Pmode, result);
1726
1727 apply_result_size ();
1728 result = gen_rtx_MEM (BLKmode, result);
1729
1730 if (targetm.have_untyped_return ())
1731 {
1732 rtx vector = result_vector (0, result);
1733 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1734 emit_barrier ();
1735 return;
1736 }
1737
1738 /* Restore the return value and note that each value is used. */
1739 size = 0;
1740 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1741 if ((mode = apply_result_mode[regno]) != VOIDmode)
1742 {
1743 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1744 if (size % align != 0)
1745 size = CEIL (size, align) * align;
1746 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1747 emit_move_insn (reg, adjust_address (result, mode, size));
1748
1749 push_to_sequence (call_fusage);
1750 emit_use (reg);
1751 call_fusage = get_insns ();
1752 end_sequence ();
1753 size += GET_MODE_SIZE (mode);
1754 }
1755
1756 /* Put the USE insns before the return. */
1757 emit_insn (call_fusage);
1758
1759 /* Return whatever value was restored by jumping directly to the end
1760 of the function. */
1761 expand_naked_return ();
1762 }
1763
1764 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1765
1766 static enum type_class
1767 type_to_class (tree type)
1768 {
1769 switch (TREE_CODE (type))
1770 {
1771 case VOID_TYPE: return void_type_class;
1772 case INTEGER_TYPE: return integer_type_class;
1773 case ENUMERAL_TYPE: return enumeral_type_class;
1774 case BOOLEAN_TYPE: return boolean_type_class;
1775 case POINTER_TYPE: return pointer_type_class;
1776 case REFERENCE_TYPE: return reference_type_class;
1777 case OFFSET_TYPE: return offset_type_class;
1778 case REAL_TYPE: return real_type_class;
1779 case COMPLEX_TYPE: return complex_type_class;
1780 case FUNCTION_TYPE: return function_type_class;
1781 case METHOD_TYPE: return method_type_class;
1782 case RECORD_TYPE: return record_type_class;
1783 case UNION_TYPE:
1784 case QUAL_UNION_TYPE: return union_type_class;
1785 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1786 ? string_type_class : array_type_class);
1787 case LANG_TYPE: return lang_type_class;
1788 default: return no_type_class;
1789 }
1790 }
1791
1792 /* Expand a call EXP to __builtin_classify_type. */
1793
1794 static rtx
1795 expand_builtin_classify_type (tree exp)
1796 {
1797 if (call_expr_nargs (exp))
1798 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1799 return GEN_INT (no_type_class);
1800 }
1801
1802 /* This helper macro, meant to be used in mathfn_built_in below,
1803 determines which among a set of three builtin math functions is
1804 appropriate for a given type mode. The `F' and `L' cases are
1805 automatically generated from the `double' case. */
1806 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1807 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1808 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1809 fcodel = BUILT_IN_MATHFN##L ; break;
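/* For instance, CASE_MATHFN (BUILT_IN_SQRT) expands to:

     case BUILT_IN_SQRT: case BUILT_IN_SQRTF: case BUILT_IN_SQRTL:
       fcode = BUILT_IN_SQRT; fcodef = BUILT_IN_SQRTF;
       fcodel = BUILT_IN_SQRTL; break;
*/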
1810 /* Similar to above, but appends _R after any F/L suffix. */
1811 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1812 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1813 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1814 fcodel = BUILT_IN_MATHFN##L_R ; break;
1815
1816 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
1817 if available. If IMPLICIT is true use the implicit builtin declaration,
1818 otherwise use the explicit declaration. If we can't do the conversion,
1819 return zero. */
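/* For example, mathfn_built_in_1 (float_type_node, BUILT_IN_SQRT, true)
   returns the decl for BUILT_IN_SQRTF, provided its implicit declaration
   is available. */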
1820
1821 static tree
1822 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
1823 {
1824 enum built_in_function fcode, fcodef, fcodel, fcode2;
1825
1826 switch (fn)
1827 {
1828 CASE_MATHFN (BUILT_IN_ACOS)
1829 CASE_MATHFN (BUILT_IN_ACOSH)
1830 CASE_MATHFN (BUILT_IN_ASIN)
1831 CASE_MATHFN (BUILT_IN_ASINH)
1832 CASE_MATHFN (BUILT_IN_ATAN)
1833 CASE_MATHFN (BUILT_IN_ATAN2)
1834 CASE_MATHFN (BUILT_IN_ATANH)
1835 CASE_MATHFN (BUILT_IN_CBRT)
1836 CASE_MATHFN (BUILT_IN_CEIL)
1837 CASE_MATHFN (BUILT_IN_CEXPI)
1838 CASE_MATHFN (BUILT_IN_COPYSIGN)
1839 CASE_MATHFN (BUILT_IN_COS)
1840 CASE_MATHFN (BUILT_IN_COSH)
1841 CASE_MATHFN (BUILT_IN_DREM)
1842 CASE_MATHFN (BUILT_IN_ERF)
1843 CASE_MATHFN (BUILT_IN_ERFC)
1844 CASE_MATHFN (BUILT_IN_EXP)
1845 CASE_MATHFN (BUILT_IN_EXP10)
1846 CASE_MATHFN (BUILT_IN_EXP2)
1847 CASE_MATHFN (BUILT_IN_EXPM1)
1848 CASE_MATHFN (BUILT_IN_FABS)
1849 CASE_MATHFN (BUILT_IN_FDIM)
1850 CASE_MATHFN (BUILT_IN_FLOOR)
1851 CASE_MATHFN (BUILT_IN_FMA)
1852 CASE_MATHFN (BUILT_IN_FMAX)
1853 CASE_MATHFN (BUILT_IN_FMIN)
1854 CASE_MATHFN (BUILT_IN_FMOD)
1855 CASE_MATHFN (BUILT_IN_FREXP)
1856 CASE_MATHFN (BUILT_IN_GAMMA)
1857 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1858 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1859 CASE_MATHFN (BUILT_IN_HYPOT)
1860 CASE_MATHFN (BUILT_IN_ILOGB)
1861 CASE_MATHFN (BUILT_IN_ICEIL)
1862 CASE_MATHFN (BUILT_IN_IFLOOR)
1863 CASE_MATHFN (BUILT_IN_INF)
1864 CASE_MATHFN (BUILT_IN_IRINT)
1865 CASE_MATHFN (BUILT_IN_IROUND)
1866 CASE_MATHFN (BUILT_IN_ISINF)
1867 CASE_MATHFN (BUILT_IN_J0)
1868 CASE_MATHFN (BUILT_IN_J1)
1869 CASE_MATHFN (BUILT_IN_JN)
1870 CASE_MATHFN (BUILT_IN_LCEIL)
1871 CASE_MATHFN (BUILT_IN_LDEXP)
1872 CASE_MATHFN (BUILT_IN_LFLOOR)
1873 CASE_MATHFN (BUILT_IN_LGAMMA)
1874 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1875 CASE_MATHFN (BUILT_IN_LLCEIL)
1876 CASE_MATHFN (BUILT_IN_LLFLOOR)
1877 CASE_MATHFN (BUILT_IN_LLRINT)
1878 CASE_MATHFN (BUILT_IN_LLROUND)
1879 CASE_MATHFN (BUILT_IN_LOG)
1880 CASE_MATHFN (BUILT_IN_LOG10)
1881 CASE_MATHFN (BUILT_IN_LOG1P)
1882 CASE_MATHFN (BUILT_IN_LOG2)
1883 CASE_MATHFN (BUILT_IN_LOGB)
1884 CASE_MATHFN (BUILT_IN_LRINT)
1885 CASE_MATHFN (BUILT_IN_LROUND)
1886 CASE_MATHFN (BUILT_IN_MODF)
1887 CASE_MATHFN (BUILT_IN_NAN)
1888 CASE_MATHFN (BUILT_IN_NANS)
1889 CASE_MATHFN (BUILT_IN_NEARBYINT)
1890 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1891 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1892 CASE_MATHFN (BUILT_IN_POW)
1893 CASE_MATHFN (BUILT_IN_POWI)
1894 CASE_MATHFN (BUILT_IN_POW10)
1895 CASE_MATHFN (BUILT_IN_REMAINDER)
1896 CASE_MATHFN (BUILT_IN_REMQUO)
1897 CASE_MATHFN (BUILT_IN_RINT)
1898 CASE_MATHFN (BUILT_IN_ROUND)
1899 CASE_MATHFN (BUILT_IN_SCALB)
1900 CASE_MATHFN (BUILT_IN_SCALBLN)
1901 CASE_MATHFN (BUILT_IN_SCALBN)
1902 CASE_MATHFN (BUILT_IN_SIGNBIT)
1903 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1904 CASE_MATHFN (BUILT_IN_SIN)
1905 CASE_MATHFN (BUILT_IN_SINCOS)
1906 CASE_MATHFN (BUILT_IN_SINH)
1907 CASE_MATHFN (BUILT_IN_SQRT)
1908 CASE_MATHFN (BUILT_IN_TAN)
1909 CASE_MATHFN (BUILT_IN_TANH)
1910 CASE_MATHFN (BUILT_IN_TGAMMA)
1911 CASE_MATHFN (BUILT_IN_TRUNC)
1912 CASE_MATHFN (BUILT_IN_Y0)
1913 CASE_MATHFN (BUILT_IN_Y1)
1914 CASE_MATHFN (BUILT_IN_YN)
1915
1916 default:
1917 return NULL_TREE;
1918 }
1919
1920 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1921 fcode2 = fcode;
1922 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1923 fcode2 = fcodef;
1924 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1925 fcode2 = fcodel;
1926 else
1927 return NULL_TREE;
1928
1929 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1930 return NULL_TREE;
1931
1932 return builtin_decl_explicit (fcode2);
1933 }
1934
1935 /* Like mathfn_built_in_1(), but always use the implicit declarations. */
1936
1937 tree
1938 mathfn_built_in (tree type, enum built_in_function fn)
1939 {
1940 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1941 }
1942
1943 /* If errno must be maintained, expand the RTL to check if the result,
1944 TARGET, of a built-in function call, EXP, is NaN, and if so set
1945 errno to EDOM. */
1946
1947 static void
1948 expand_errno_check (tree exp, rtx target)
1949 {
1950 rtx_code_label *lab = gen_label_rtx ();
1951
1952 /* Test the result; if it is NaN, set errno=EDOM because
1953 the argument was not in the domain. */
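/* The self-comparison relies on the IEEE rule that TARGET == TARGET is
   false only when TARGET is a NaN, so the branch to LAB is taken for
   every non-NaN result. */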
1954 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1955 NULL_RTX, NULL, lab,
1956 /* The jump is very likely. */
1957 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
1958
1959 #ifdef TARGET_EDOM
1960 /* If this built-in doesn't throw an exception, set errno directly. */
1961 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1962 {
1963 #ifdef GEN_ERRNO_RTX
1964 rtx errno_rtx = GEN_ERRNO_RTX;
1965 #else
1966 rtx errno_rtx
1967 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1968 #endif
1969 emit_move_insn (errno_rtx,
1970 gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
1971 emit_label (lab);
1972 return;
1973 }
1974 #endif
1975
1976 /* Make sure the library call isn't expanded as a tail call. */
1977 CALL_EXPR_TAILCALL (exp) = 0;
1978
1979 /* We can't set errno=EDOM directly; let the library call do it.
1980 Pop the arguments right away in case the call gets deleted. */
1981 NO_DEFER_POP;
1982 expand_call (exp, target, 0);
1983 OK_DEFER_POP;
1984 emit_label (lab);
1985 }
1986
1987 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1988 Return NULL_RTX if a normal call should be emitted rather than expanding
1989 the function in-line. EXP is the expression that is a call to the builtin
1990 function; if convenient, the result should be placed in TARGET.
1991 SUBTARGET may be used as the target for computing one of EXP's operands. */
1992
1993 static rtx
1994 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1995 {
1996 optab builtin_optab;
1997 rtx op0;
1998 rtx_insn *insns;
1999 tree fndecl = get_callee_fndecl (exp);
2000 machine_mode mode;
2001 bool errno_set = false;
2002 bool try_widening = false;
2003 tree arg;
2004
2005 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2006 return NULL_RTX;
2007
2008 arg = CALL_EXPR_ARG (exp, 0);
2009
2010 switch (DECL_FUNCTION_CODE (fndecl))
2011 {
2012 CASE_FLT_FN (BUILT_IN_SQRT):
2013 errno_set = ! tree_expr_nonnegative_p (arg);
2014 try_widening = true;
2015 builtin_optab = sqrt_optab;
2016 break;
2017 CASE_FLT_FN (BUILT_IN_EXP):
2018 errno_set = true; builtin_optab = exp_optab; break;
2019 CASE_FLT_FN (BUILT_IN_EXP10):
2020 CASE_FLT_FN (BUILT_IN_POW10):
2021 errno_set = true; builtin_optab = exp10_optab; break;
2022 CASE_FLT_FN (BUILT_IN_EXP2):
2023 errno_set = true; builtin_optab = exp2_optab; break;
2024 CASE_FLT_FN (BUILT_IN_EXPM1):
2025 errno_set = true; builtin_optab = expm1_optab; break;
2026 CASE_FLT_FN (BUILT_IN_LOGB):
2027 errno_set = true; builtin_optab = logb_optab; break;
2028 CASE_FLT_FN (BUILT_IN_LOG):
2029 errno_set = true; builtin_optab = log_optab; break;
2030 CASE_FLT_FN (BUILT_IN_LOG10):
2031 errno_set = true; builtin_optab = log10_optab; break;
2032 CASE_FLT_FN (BUILT_IN_LOG2):
2033 errno_set = true; builtin_optab = log2_optab; break;
2034 CASE_FLT_FN (BUILT_IN_LOG1P):
2035 errno_set = true; builtin_optab = log1p_optab; break;
2036 CASE_FLT_FN (BUILT_IN_ASIN):
2037 builtin_optab = asin_optab; break;
2038 CASE_FLT_FN (BUILT_IN_ACOS):
2039 builtin_optab = acos_optab; break;
2040 CASE_FLT_FN (BUILT_IN_TAN):
2041 builtin_optab = tan_optab; break;
2042 CASE_FLT_FN (BUILT_IN_ATAN):
2043 builtin_optab = atan_optab; break;
2044 CASE_FLT_FN (BUILT_IN_FLOOR):
2045 builtin_optab = floor_optab; break;
2046 CASE_FLT_FN (BUILT_IN_CEIL):
2047 builtin_optab = ceil_optab; break;
2048 CASE_FLT_FN (BUILT_IN_TRUNC):
2049 builtin_optab = btrunc_optab; break;
2050 CASE_FLT_FN (BUILT_IN_ROUND):
2051 builtin_optab = round_optab; break;
2052 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2053 builtin_optab = nearbyint_optab;
2054 if (flag_trapping_math)
2055 break;
2056 /* Else fallthrough and expand as rint. */
2057 CASE_FLT_FN (BUILT_IN_RINT):
2058 builtin_optab = rint_optab; break;
2059 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2060 builtin_optab = significand_optab; break;
2061 default:
2062 gcc_unreachable ();
2063 }
2064
2065 /* Make a suitable register to place result in. */
2066 mode = TYPE_MODE (TREE_TYPE (exp));
2067
2068 if (! flag_errno_math || ! HONOR_NANS (mode))
2069 errno_set = false;
2070
2071 /* Before working hard, check whether the instruction is available, but try
2072 to widen the mode for specific operations. */
2073 if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2074 || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
2075 && (!errno_set || !optimize_insn_for_size_p ()))
2076 {
2077 rtx result = gen_reg_rtx (mode);
2078
2079 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2080 need to expand the argument again. This way, we will not perform
2081 side-effects more than once. */
2082 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2083
2084 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2085
2086 start_sequence ();
2087
2088 /* Compute into RESULT.
2089 Set RESULT to wherever the result comes back. */
2090 result = expand_unop (mode, builtin_optab, op0, result, 0);
2091
2092 if (result != 0)
2093 {
2094 if (errno_set)
2095 expand_errno_check (exp, result);
2096
2097 /* Output the entire sequence. */
2098 insns = get_insns ();
2099 end_sequence ();
2100 emit_insn (insns);
2101 return result;
2102 }
2103
2104 /* If we were unable to expand via the builtin, stop the sequence
2105 (without outputting the insns) and call the library function
2106 with the stabilized argument list. */
2107 end_sequence ();
2108 }
2109
2110 return expand_call (exp, target, target == const0_rtx);
2111 }
2112
2113 /* Expand a call to the builtin binary math functions (pow and atan2).
2114 Return NULL_RTX if a normal call should be emitted rather than expanding the
2115 function in-line. EXP is the expression that is a call to the builtin
2116 function; if convenient, the result should be placed in TARGET.
2117 SUBTARGET may be used as the target for computing one of EXP's
2118 operands. */
2119
2120 static rtx
2121 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2122 {
2123 optab builtin_optab;
2124 rtx op0, op1, result;
2125 rtx_insn *insns;
2126 int op1_type = REAL_TYPE;
2127 tree fndecl = get_callee_fndecl (exp);
2128 tree arg0, arg1;
2129 machine_mode mode;
2130 bool errno_set = true;
2131
2132 switch (DECL_FUNCTION_CODE (fndecl))
2133 {
2134 CASE_FLT_FN (BUILT_IN_SCALBN):
2135 CASE_FLT_FN (BUILT_IN_SCALBLN):
2136 CASE_FLT_FN (BUILT_IN_LDEXP):
2137 op1_type = INTEGER_TYPE; /* FALLTHRU */
2138 default:
2139 break;
2140 }
2141
2142 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2143 return NULL_RTX;
2144
2145 arg0 = CALL_EXPR_ARG (exp, 0);
2146 arg1 = CALL_EXPR_ARG (exp, 1);
2147
2148 switch (DECL_FUNCTION_CODE (fndecl))
2149 {
2150 CASE_FLT_FN (BUILT_IN_POW):
2151 builtin_optab = pow_optab; break;
2152 CASE_FLT_FN (BUILT_IN_ATAN2):
2153 builtin_optab = atan2_optab; break;
2154 CASE_FLT_FN (BUILT_IN_SCALB):
2155 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2156 return 0;
2157 builtin_optab = scalb_optab; break;
2158 CASE_FLT_FN (BUILT_IN_SCALBN):
2159 CASE_FLT_FN (BUILT_IN_SCALBLN):
2160 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2161 return 0;
2162 /* Fall through... */
2163 CASE_FLT_FN (BUILT_IN_LDEXP):
2164 builtin_optab = ldexp_optab; break;
2165 CASE_FLT_FN (BUILT_IN_FMOD):
2166 builtin_optab = fmod_optab; break;
2167 CASE_FLT_FN (BUILT_IN_REMAINDER):
2168 CASE_FLT_FN (BUILT_IN_DREM):
2169 builtin_optab = remainder_optab; break;
2170 default:
2171 gcc_unreachable ();
2172 }
2173
2174 /* Make a suitable register to place result in. */
2175 mode = TYPE_MODE (TREE_TYPE (exp));
2176
2177 /* Before working hard, check whether the instruction is available. */
2178 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2179 return NULL_RTX;
2180
2181 result = gen_reg_rtx (mode);
2182
2183 if (! flag_errno_math || ! HONOR_NANS (mode))
2184 errno_set = false;
2185
2186 if (errno_set && optimize_insn_for_size_p ())
2187 return 0;
2188
2189 /* Always stabilize the argument list. */
2190 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2191 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2192
2193 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2194 op1 = expand_normal (arg1);
2195
2196 start_sequence ();
2197
2198 /* Compute into RESULT.
2199 Set RESULT to wherever the result comes back. */
2200 result = expand_binop (mode, builtin_optab, op0, op1,
2201 result, 0, OPTAB_DIRECT);
2202
2203 /* If we were unable to expand via the builtin, stop the sequence
2204 (without outputting the insns) and call the library function
2205 with the stabilized argument list. */
2206 if (result == 0)
2207 {
2208 end_sequence ();
2209 return expand_call (exp, target, target == const0_rtx);
2210 }
2211
2212 if (errno_set)
2213 expand_errno_check (exp, result);
2214
2215 /* Output the entire sequence. */
2216 insns = get_insns ();
2217 end_sequence ();
2218 emit_insn (insns);
2219
2220 return result;
2221 }
2222
2223 /* Expand a call to the builtin ternary math functions (fma).
2224 Return NULL_RTX if a normal call should be emitted rather than expanding the
2225 function in-line. EXP is the expression that is a call to the builtin
2226 function; if convenient, the result should be placed in TARGET.
2227 SUBTARGET may be used as the target for computing one of EXP's
2228 operands. */
2229
2230 static rtx
2231 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2232 {
2233 optab builtin_optab;
2234 rtx op0, op1, op2, result;
2235 rtx_insn *insns;
2236 tree fndecl = get_callee_fndecl (exp);
2237 tree arg0, arg1, arg2;
2238 machine_mode mode;
2239
2240 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2241 return NULL_RTX;
2242
2243 arg0 = CALL_EXPR_ARG (exp, 0);
2244 arg1 = CALL_EXPR_ARG (exp, 1);
2245 arg2 = CALL_EXPR_ARG (exp, 2);
2246
2247 switch (DECL_FUNCTION_CODE (fndecl))
2248 {
2249 CASE_FLT_FN (BUILT_IN_FMA):
2250 builtin_optab = fma_optab; break;
2251 default:
2252 gcc_unreachable ();
2253 }
2254
2255 /* Make a suitable register to place result in. */
2256 mode = TYPE_MODE (TREE_TYPE (exp));
2257
2258 /* Before working hard, check whether the instruction is available. */
2259 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2260 return NULL_RTX;
2261
2262 result = gen_reg_rtx (mode);
2263
2264 /* Always stabilize the argument list. */
2265 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2266 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2267 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2268
2269 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2270 op1 = expand_normal (arg1);
2271 op2 = expand_normal (arg2);
2272
2273 start_sequence ();
2274
2275 /* Compute into RESULT.
2276 Set RESULT to wherever the result comes back. */
2277 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2278 result, 0);
2279
2280 /* If we were unable to expand via the builtin, stop the sequence
2281 (without outputting the insns) and call the library function
2282 with the stabilized argument list. */
2283 if (result == 0)
2284 {
2285 end_sequence ();
2286 return expand_call (exp, target, target == const0_rtx);
2287 }
2288
2289 /* Output the entire sequence. */
2290 insns = get_insns ();
2291 end_sequence ();
2292 emit_insn (insns);
2293
2294 return result;
2295 }
2296
2297 /* Expand a call to the builtin sin and cos math functions.
2298 Return NULL_RTX if a normal call should be emitted rather than expanding the
2299 function in-line. EXP is the expression that is a call to the builtin
2300 function; if convenient, the result should be placed in TARGET.
2301 SUBTARGET may be used as the target for computing one of EXP's
2302 operands. */
2303
2304 static rtx
2305 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2306 {
2307 optab builtin_optab;
2308 rtx op0;
2309 rtx_insn *insns;
2310 tree fndecl = get_callee_fndecl (exp);
2311 machine_mode mode;
2312 tree arg;
2313
2314 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2315 return NULL_RTX;
2316
2317 arg = CALL_EXPR_ARG (exp, 0);
2318
2319 switch (DECL_FUNCTION_CODE (fndecl))
2320 {
2321 CASE_FLT_FN (BUILT_IN_SIN):
2322 CASE_FLT_FN (BUILT_IN_COS):
2323 builtin_optab = sincos_optab; break;
2324 default:
2325 gcc_unreachable ();
2326 }
2327
2328 /* Make a suitable register to place result in. */
2329 mode = TYPE_MODE (TREE_TYPE (exp));
2330
2331 /* Check if the sincos insn is available; otherwise fall back
2332 to the sin or cos insn. */
2333 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2334 switch (DECL_FUNCTION_CODE (fndecl))
2335 {
2336 CASE_FLT_FN (BUILT_IN_SIN):
2337 builtin_optab = sin_optab; break;
2338 CASE_FLT_FN (BUILT_IN_COS):
2339 builtin_optab = cos_optab; break;
2340 default:
2341 gcc_unreachable ();
2342 }
2343
2344 /* Before working hard, check whether the instruction is available. */
2345 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2346 {
2347 rtx result = gen_reg_rtx (mode);
2348
2349 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2350 need to expand the argument again. This way, we will not perform
2351 side-effects more than once. */
2352 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2353
2354 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2355
2356 start_sequence ();
2357
2358 /* Compute into RESULT.
2359 Set RESULT to wherever the result comes back. */
2360 if (builtin_optab == sincos_optab)
2361 {
2362 int ok;
2363
2364 switch (DECL_FUNCTION_CODE (fndecl))
2365 {
2366 CASE_FLT_FN (BUILT_IN_SIN):
2367 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2368 break;
2369 CASE_FLT_FN (BUILT_IN_COS):
2370 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2371 break;
2372 default:
2373 gcc_unreachable ();
2374 }
2375 gcc_assert (ok);
2376 }
2377 else
2378 result = expand_unop (mode, builtin_optab, op0, result, 0);
2379
2380 if (result != 0)
2381 {
2382 /* Output the entire sequence. */
2383 insns = get_insns ();
2384 end_sequence ();
2385 emit_insn (insns);
2386 return result;
2387 }
2388
2389 /* If we were unable to expand via the builtin, stop the sequence
2390 (without outputting the insns) and call the library function
2391 with the stabilized argument list. */
2392 end_sequence ();
2393 }
2394
2395 return expand_call (exp, target, target == const0_rtx);
2396 }
2397
2398 /* Given an interclass math builtin decl FNDECL and its argument ARG,
2399 return an RTL instruction code that implements the functionality.
2400 If that isn't possible or available return CODE_FOR_nothing. */
2401
2402 static enum insn_code
2403 interclass_mathfn_icode (tree arg, tree fndecl)
2404 {
2405 bool errno_set = false;
2406 optab builtin_optab = unknown_optab;
2407 machine_mode mode;
2408
2409 switch (DECL_FUNCTION_CODE (fndecl))
2410 {
2411 CASE_FLT_FN (BUILT_IN_ILOGB):
2412 errno_set = true; builtin_optab = ilogb_optab; break;
2413 CASE_FLT_FN (BUILT_IN_ISINF):
2414 builtin_optab = isinf_optab; break;
2415 case BUILT_IN_ISNORMAL:
2416 case BUILT_IN_ISFINITE:
2417 CASE_FLT_FN (BUILT_IN_FINITE):
2418 case BUILT_IN_FINITED32:
2419 case BUILT_IN_FINITED64:
2420 case BUILT_IN_FINITED128:
2421 case BUILT_IN_ISINFD32:
2422 case BUILT_IN_ISINFD64:
2423 case BUILT_IN_ISINFD128:
2424 /* These builtins have no optabs (yet). */
2425 break;
2426 default:
2427 gcc_unreachable ();
2428 }
2429
2430 /* There's no easy way to detect the case where we need to set EDOM. */
2431 if (flag_errno_math && errno_set)
2432 return CODE_FOR_nothing;
2433
2434 /* Optab mode depends on the mode of the input argument. */
2435 mode = TYPE_MODE (TREE_TYPE (arg));
2436
2437 if (builtin_optab)
2438 return optab_handler (builtin_optab, mode);
2439 return CODE_FOR_nothing;
2440 }
2441
2442 /* Expand a call to one of the builtin math functions that operate on
2443 a floating point argument and output an integer result (ilogb, isinf,
2444 isnan, etc).
2445 Return 0 if a normal call should be emitted rather than expanding the
2446 function in-line. EXP is the expression that is a call to the builtin
2447 function; if convenient, the result should be placed in TARGET. */
2448
2449 static rtx
2450 expand_builtin_interclass_mathfn (tree exp, rtx target)
2451 {
2452 enum insn_code icode = CODE_FOR_nothing;
2453 rtx op0;
2454 tree fndecl = get_callee_fndecl (exp);
2455 machine_mode mode;
2456 tree arg;
2457
2458 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2459 return NULL_RTX;
2460
2461 arg = CALL_EXPR_ARG (exp, 0);
2462 icode = interclass_mathfn_icode (arg, fndecl);
2463 mode = TYPE_MODE (TREE_TYPE (arg));
2464
2465 if (icode != CODE_FOR_nothing)
2466 {
2467 struct expand_operand ops[1];
2468 rtx_insn *last = get_last_insn ();
2469 tree orig_arg = arg;
2470
2471 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2472 need to expand the argument again. This way, we will not perform
2473 side-effects more than once. */
2474 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2475
2476 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2477
2478 if (mode != GET_MODE (op0))
2479 op0 = convert_to_mode (mode, op0, 0);
2480
2481 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2482 if (maybe_legitimize_operands (icode, 0, 1, ops)
2483 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2484 return ops[0].value;
2485
2486 delete_insns_since (last);
2487 CALL_EXPR_ARG (exp, 0) = orig_arg;
2488 }
2489
2490 return NULL_RTX;
2491 }
2492
2493 /* Expand a call to the builtin sincos math function.
2494 Return NULL_RTX if a normal call should be emitted rather than expanding the
2495 function in-line. EXP is the expression that is a call to the builtin
2496 function. */
2497
2498 static rtx
2499 expand_builtin_sincos (tree exp)
2500 {
2501 rtx op0, op1, op2, target1, target2;
2502 machine_mode mode;
2503 tree arg, sinp, cosp;
2504 int result;
2505 location_t loc = EXPR_LOCATION (exp);
2506 tree alias_type, alias_off;
2507
2508 if (!validate_arglist (exp, REAL_TYPE,
2509 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2510 return NULL_RTX;
2511
2512 arg = CALL_EXPR_ARG (exp, 0);
2513 sinp = CALL_EXPR_ARG (exp, 1);
2514 cosp = CALL_EXPR_ARG (exp, 2);
2515
2516 /* Make a suitable register to place result in. */
2517 mode = TYPE_MODE (TREE_TYPE (arg));
2518
2519 /* Check if sincos insn is available, otherwise emit the call. */
2520 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2521 return NULL_RTX;
2522
2523 target1 = gen_reg_rtx (mode);
2524 target2 = gen_reg_rtx (mode);
2525
2526 op0 = expand_normal (arg);
2527 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2528 alias_off = build_int_cst (alias_type, 0);
2529 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2530 sinp, alias_off));
2531 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2532 cosp, alias_off));
2533
2534 /* Compute into target1 and target2, which receive the sin and cos
2535 values respectively. */
2536 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2537 gcc_assert (result);
2538
2539 /* Move target1 and target2 to the memory locations indicated
2540 by op1 and op2. */
2541 emit_move_insn (op1, target1);
2542 emit_move_insn (op2, target2);
2543
2544 return const0_rtx;
2545 }
2546
2547 /* Expand a call to the internal cexpi builtin to the sincos math function.
2548 EXP is the expression that is a call to the builtin function; if convenient,
2549 the result should be placed in TARGET. */
2550
2551 static rtx
2552 expand_builtin_cexpi (tree exp, rtx target)
2553 {
2554 tree fndecl = get_callee_fndecl (exp);
2555 tree arg, type;
2556 machine_mode mode;
2557 rtx op0, op1, op2;
2558 location_t loc = EXPR_LOCATION (exp);
2559
2560 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2561 return NULL_RTX;
2562
2563 arg = CALL_EXPR_ARG (exp, 0);
2564 type = TREE_TYPE (arg);
2565 mode = TYPE_MODE (TREE_TYPE (arg));
2566
2567 /* Try expanding via a sincos optab; fall back to emitting a libcall
2568 to sincos or cexp. We are sure to have sincos or cexp, because cexpi
2569 is only generated from those calls or when either of them is available. */
2570 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2571 {
2572 op1 = gen_reg_rtx (mode);
2573 op2 = gen_reg_rtx (mode);
2574
2575 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2576
2577 /* Compute into op1 and op2. */
2578 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2579 }
2580 else if (targetm.libc_has_function (function_sincos))
2581 {
2582 tree call, fn = NULL_TREE;
2583 tree top1, top2;
2584 rtx op1a, op2a;
2585
2586 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2587 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2588 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2589 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2590 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2591 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2592 else
2593 gcc_unreachable ();
2594
2595 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2596 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2597 op1a = copy_addr_to_reg (XEXP (op1, 0));
2598 op2a = copy_addr_to_reg (XEXP (op2, 0));
2599 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2600 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2601
2602 /* Make sure not to fold the sincos call again. */
2603 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2604 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2605 call, 3, arg, top1, top2));
2606 }
2607 else
2608 {
2609 tree call, fn = NULL_TREE, narg;
2610 tree ctype = build_complex_type (type);
2611
2612 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2613 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2614 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2615 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2616 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2617 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2618 else
2619 gcc_unreachable ();
2620
2621 /* If we don't have a decl for cexp, create one. This is the
2622 friendliest fallback if the user calls __builtin_cexpi
2623 without full target C99 function support. */
2624 if (fn == NULL_TREE)
2625 {
2626 tree fntype;
2627 const char *name = NULL;
2628
2629 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2630 name = "cexpf";
2631 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2632 name = "cexp";
2633 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2634 name = "cexpl";
2635
2636 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2637 fn = build_fn_decl (name, fntype);
2638 }
2639
2640 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2641 build_real (type, dconst0), arg);
2642
2643 /* Make sure not to fold the cexp call again. */
2644 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2645 return expand_expr (build_call_nary (ctype, call, 1, narg),
2646 target, VOIDmode, EXPAND_NORMAL);
2647 }
2648
2649 /* Now build the proper return type. */
2650 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2651 make_tree (TREE_TYPE (arg), op2),
2652 make_tree (TREE_TYPE (arg), op1)),
2653 target, VOIDmode, EXPAND_NORMAL);
2654 }
2655
2656 /* Conveniently construct a function call expression. FNDECL names the
2657 function to be called, N is the number of arguments, and the "..."
2658 parameters are the argument expressions. Unlike build_call_expr,
2659 this doesn't fold the call, hence it will always return a CALL_EXPR. */
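/* For example, build_call_nofold_loc (loc, fndecl, 2, dest, src) returns
   an unfolded CALL_EXPR calling FNDECL with the two argument trees DEST
   and SRC. */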
2660
2661 static tree
2662 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2663 {
2664 va_list ap;
2665 tree fntype = TREE_TYPE (fndecl);
2666 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2667
2668 va_start (ap, n);
2669 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2670 va_end (ap);
2671 SET_EXPR_LOCATION (fn, loc);
2672 return fn;
2673 }
2674
2675 /* Expand a call to one of the builtin rounding functions gcc defines
2676 as an extension (lfloor and lceil). As these are gcc extensions we
2677 do not need to worry about setting errno to EDOM.
2678 If expanding via optab fails, lower expression to (int)(floor(x)).
2679 EXP is the expression that is a call to the builtin function;
2680 if convenient, the result should be placed in TARGET. */
2681
2682 static rtx
2683 expand_builtin_int_roundingfn (tree exp, rtx target)
2684 {
2685 convert_optab builtin_optab;
2686 rtx op0, tmp;
2687 rtx_insn *insns;
2688 tree fndecl = get_callee_fndecl (exp);
2689 enum built_in_function fallback_fn;
2690 tree fallback_fndecl;
2691 machine_mode mode;
2692 tree arg;
2693
2694 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2695 gcc_unreachable ();
2696
2697 arg = CALL_EXPR_ARG (exp, 0);
2698
2699 switch (DECL_FUNCTION_CODE (fndecl))
2700 {
2701 CASE_FLT_FN (BUILT_IN_ICEIL):
2702 CASE_FLT_FN (BUILT_IN_LCEIL):
2703 CASE_FLT_FN (BUILT_IN_LLCEIL):
2704 builtin_optab = lceil_optab;
2705 fallback_fn = BUILT_IN_CEIL;
2706 break;
2707
2708 CASE_FLT_FN (BUILT_IN_IFLOOR):
2709 CASE_FLT_FN (BUILT_IN_LFLOOR):
2710 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2711 builtin_optab = lfloor_optab;
2712 fallback_fn = BUILT_IN_FLOOR;
2713 break;
2714
2715 default:
2716 gcc_unreachable ();
2717 }
2718
2719 /* Make a suitable register to place result in. */
2720 mode = TYPE_MODE (TREE_TYPE (exp));
2721
2722 target = gen_reg_rtx (mode);
2723
2724 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2725 need to expand the argument again. This way, we will not perform
2726 side-effects more than once. */
2727 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2728
2729 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2730
2731 start_sequence ();
2732
2733 /* Compute into TARGET. */
2734 if (expand_sfix_optab (target, op0, builtin_optab))
2735 {
2736 /* Output the entire sequence. */
2737 insns = get_insns ();
2738 end_sequence ();
2739 emit_insn (insns);
2740 return target;
2741 }
2742
2743 /* If we were unable to expand via the builtin, stop the sequence
2744 (without outputting the insns). */
2745 end_sequence ();
2746
2747 /* Fall back to floating point rounding optab. */
2748 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2749
2750 /* For non-C99 targets we may end up without a fallback fndecl here
2751 if the user called __builtin_lfloor directly. In this case emit
2752 a call to the floor/ceil variants nevertheless. This should result
2753 in the best user experience for targets without full C99 support. */
2754 if (fallback_fndecl == NULL_TREE)
2755 {
2756 tree fntype;
2757 const char *name = NULL;
2758
2759 switch (DECL_FUNCTION_CODE (fndecl))
2760 {
2761 case BUILT_IN_ICEIL:
2762 case BUILT_IN_LCEIL:
2763 case BUILT_IN_LLCEIL:
2764 name = "ceil";
2765 break;
2766 case BUILT_IN_ICEILF:
2767 case BUILT_IN_LCEILF:
2768 case BUILT_IN_LLCEILF:
2769 name = "ceilf";
2770 break;
2771 case BUILT_IN_ICEILL:
2772 case BUILT_IN_LCEILL:
2773 case BUILT_IN_LLCEILL:
2774 name = "ceill";
2775 break;
2776 case BUILT_IN_IFLOOR:
2777 case BUILT_IN_LFLOOR:
2778 case BUILT_IN_LLFLOOR:
2779 name = "floor";
2780 break;
2781 case BUILT_IN_IFLOORF:
2782 case BUILT_IN_LFLOORF:
2783 case BUILT_IN_LLFLOORF:
2784 name = "floorf";
2785 break;
2786 case BUILT_IN_IFLOORL:
2787 case BUILT_IN_LFLOORL:
2788 case BUILT_IN_LLFLOORL:
2789 name = "floorl";
2790 break;
2791 default:
2792 gcc_unreachable ();
2793 }
2794
2795 fntype = build_function_type_list (TREE_TYPE (arg),
2796 TREE_TYPE (arg), NULL_TREE);
2797 fallback_fndecl = build_fn_decl (name, fntype);
2798 }
2799
2800 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2801
2802 tmp = expand_normal (exp);
2803 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2804
2805 /* Truncate the result of the floating point optab to an integer
2806 via expand_fix (). */
2807 target = gen_reg_rtx (mode);
2808 expand_fix (target, tmp, 0);
2809
2810 return target;
2811 }
2812
2813 /* Expand a call to one of the builtin math functions doing integer
2814 conversion (lrint).
2815 Return 0 if a normal call should be emitted rather than expanding the
2816 function in-line. EXP is the expression that is a call to the builtin
2817 function; if convenient, the result should be placed in TARGET. */
2818
2819 static rtx
2820 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2821 {
2822 convert_optab builtin_optab;
2823 rtx op0;
2824 rtx_insn *insns;
2825 tree fndecl = get_callee_fndecl (exp);
2826 tree arg;
2827 machine_mode mode;
2828 enum built_in_function fallback_fn = BUILT_IN_NONE;
2829
2830 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2831 gcc_unreachable ();
2832
2833 arg = CALL_EXPR_ARG (exp, 0);
2834
2835 switch (DECL_FUNCTION_CODE (fndecl))
2836 {
2837 CASE_FLT_FN (BUILT_IN_IRINT):
2838 fallback_fn = BUILT_IN_LRINT;
2839 /* FALLTHRU */
2840 CASE_FLT_FN (BUILT_IN_LRINT):
2841 CASE_FLT_FN (BUILT_IN_LLRINT):
2842 builtin_optab = lrint_optab;
2843 break;
2844
2845 CASE_FLT_FN (BUILT_IN_IROUND):
2846 fallback_fn = BUILT_IN_LROUND;
2847 /* FALLTHRU */
2848 CASE_FLT_FN (BUILT_IN_LROUND):
2849 CASE_FLT_FN (BUILT_IN_LLROUND):
2850 builtin_optab = lround_optab;
2851 break;
2852
2853 default:
2854 gcc_unreachable ();
2855 }
2856
2857 /* There's no easy way to detect the case where we need to set EDOM. */
2858 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2859 return NULL_RTX;
2860
2861 /* Make a suitable register to place result in. */
2862 mode = TYPE_MODE (TREE_TYPE (exp));
2863
2864 /* Expand in-line only when errno handling is not required, since we can't detect the EDOM case. */
2865 if (!flag_errno_math)
2866 {
2867 rtx result = gen_reg_rtx (mode);
2868
2869 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2870 need to expand the argument again. This way, we will not perform
2871 side-effects more than once. */
2872 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2873
2874 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2875
2876 start_sequence ();
2877
2878 if (expand_sfix_optab (result, op0, builtin_optab))
2879 {
2880 /* Output the entire sequence. */
2881 insns = get_insns ();
2882 end_sequence ();
2883 emit_insn (insns);
2884 return result;
2885 }
2886
2887 /* If we were unable to expand via the builtin, stop the sequence
2888 (without outputting the insns) and call the library function
2889 with the stabilized argument list. */
2890 end_sequence ();
2891 }
2892
2893 if (fallback_fn != BUILT_IN_NONE)
2894 {
2895 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2896 targets, (int) round (x) should never be transformed into
2897 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2898 a call to lround in the hope that the target provides at least some
2899 C99 functions. This should result in the best user experience for
2900 targets without full C99 support. */
2901 tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
2902 fallback_fn, 0);
2903
2904 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2905 fallback_fndecl, 1, arg);
2906
2907 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2908 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2909 return convert_to_mode (mode, target, 0);
2910 }
2911
2912 return expand_call (exp, target, target == const0_rtx);
2913 }
2914
2915 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2916 a normal call should be emitted rather than expanding the function
2917 in-line. EXP is the expression that is a call to the builtin
2918 function; if convenient, the result should be placed in TARGET. */
2919
2920 static rtx
2921 expand_builtin_powi (tree exp, rtx target)
2922 {
2923 tree arg0, arg1;
2924 rtx op0, op1;
2925 machine_mode mode;
2926 machine_mode mode2;
2927
2928 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2929 return NULL_RTX;
2930
2931 arg0 = CALL_EXPR_ARG (exp, 0);
2932 arg1 = CALL_EXPR_ARG (exp, 1);
2933 mode = TYPE_MODE (TREE_TYPE (exp));
2934
2935 /* Emit a libcall to libgcc. */
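/* The libgcc entry points are the __powi<mode>2 family, e.g. __powidf2
   for DFmode; optab_libfunc picks the one matching MODE. */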
2936
2937 /* Mode of the 2nd argument must match that of an int. */
2938 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2939
2940 if (target == NULL_RTX)
2941 target = gen_reg_rtx (mode);
2942
2943 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2944 if (GET_MODE (op0) != mode)
2945 op0 = convert_to_mode (mode, op0, 0);
2946 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2947 if (GET_MODE (op1) != mode2)
2948 op1 = convert_to_mode (mode2, op1, 0);
2949
2950 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2951 target, LCT_CONST, mode, 2,
2952 op0, mode, op1, mode2);
2953
2954 return target;
2955 }
2956
2957 /* Expand expression EXP which is a call to the strlen builtin. Return
2958 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
2959 try to get the result in TARGET, if convenient. */
2960
2961 static rtx
2962 expand_builtin_strlen (tree exp, rtx target,
2963 machine_mode target_mode)
2964 {
2965 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2966 return NULL_RTX;
2967 else
2968 {
2969 struct expand_operand ops[4];
2970 rtx pat;
2971 tree len;
2972 tree src = CALL_EXPR_ARG (exp, 0);
2973 rtx src_reg;
2974 rtx_insn *before_strlen;
2975 machine_mode insn_mode = target_mode;
2976 enum insn_code icode = CODE_FOR_nothing;
2977 unsigned int align;
2978
2979 /* If the length can be computed at compile-time, return it. */
2980 len = c_strlen (src, 0);
2981 if (len)
2982 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2983
2984 /* If the length can be computed at compile-time and is a constant
2985 integer, but there are side-effects in src, evaluate
2986 src for side-effects, then return len.
2987 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2988 can be optimized into: i++; x = 3; */
2989 len = c_strlen (src, 1);
2990 if (len && TREE_CODE (len) == INTEGER_CST)
2991 {
2992 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2993 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2994 }
2995
2996 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2997
2998 /* If SRC is not a pointer type, don't do this operation inline. */
2999 if (align == 0)
3000 return NULL_RTX;
3001
3002 /* Bail out if we can't compute strlen in the right mode. */
3003 while (insn_mode != VOIDmode)
3004 {
3005 icode = optab_handler (strlen_optab, insn_mode);
3006 if (icode != CODE_FOR_nothing)
3007 break;
3008
3009 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3010 }
3011 if (insn_mode == VOIDmode)
3012 return NULL_RTX;
3013
3014 /* Make a place to hold the source address. We will not expand
3015 the actual source until we are sure that the expansion will
3016 not fail -- there are trees that cannot be expanded twice. */
3017 src_reg = gen_reg_rtx (Pmode);
3018
3019 /* Mark the beginning of the strlen sequence so we can emit the
3020 source operand later. */
3021 before_strlen = get_last_insn ();
3022
3023 create_output_operand (&ops[0], target, insn_mode);
3024 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3025 create_integer_operand (&ops[2], 0);
3026 create_integer_operand (&ops[3], align);
3027 if (!maybe_expand_insn (icode, 4, ops))
3028 return NULL_RTX;
3029
3030 /* Now that we are assured of success, expand the source. */
3031 start_sequence ();
3032 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3033 if (pat != src_reg)
3034 {
3035 #ifdef POINTERS_EXTEND_UNSIGNED
3036 if (GET_MODE (pat) != Pmode)
3037 pat = convert_to_mode (Pmode, pat,
3038 POINTERS_EXTEND_UNSIGNED);
3039 #endif
3040 emit_move_insn (src_reg, pat);
3041 }
3042 pat = get_insns ();
3043 end_sequence ();
3044
3045 if (before_strlen)
3046 emit_insn_after (pat, before_strlen);
3047 else
3048 emit_insn_before (pat, get_insns ());
3049
3050 /* Return the value in the proper mode for this function. */
3051 if (GET_MODE (ops[0].value) == target_mode)
3052 target = ops[0].value;
3053 else if (target != 0)
3054 convert_move (target, ops[0].value, 0);
3055 else
3056 target = convert_to_mode (target_mode, ops[0].value, 0);
3057
3058 return target;
3059 }
3060 }
3061
3062 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3063 bytes from constant string DATA + OFFSET and return it as target
3064 constant. */
3065
3066 static rtx
3067 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3068 machine_mode mode)
3069 {
3070 const char *str = (const char *) data;
3071
3072 gcc_assert (offset >= 0
3073 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3074 <= strlen (str) + 1));
3075
3076 return c_readstr (str + offset, mode);
3077 }
3078
3079 /* LEN specifies the length of the block for a memcpy/memset operation.
3080 Figure out its range and store it in MIN_SIZE/MAX_SIZE.
3081 In some cases we can make a very likely guess about the maximum size,
3082 which we then store in PROBABLE_MAX_SIZE. */
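/* For example, for memcpy (a, b, n) where value range information shows
   n to lie in [8, 128], MIN_SIZE becomes 8 while MAX_SIZE and
   PROBABLE_MAX_SIZE become 128. */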
3083
3084 static void
3085 determine_block_size (tree len, rtx len_rtx,
3086 unsigned HOST_WIDE_INT *min_size,
3087 unsigned HOST_WIDE_INT *max_size,
3088 unsigned HOST_WIDE_INT *probable_max_size)
3089 {
3090 if (CONST_INT_P (len_rtx))
3091 {
3092 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3093 return;
3094 }
3095 else
3096 {
3097 wide_int min, max;
3098 enum value_range_type range_type = VR_UNDEFINED;
3099
3100 /* Determine bounds from the type. */
3101 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3102 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3103 else
3104 *min_size = 0;
3105 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3106 *probable_max_size = *max_size
3107 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3108 else
3109 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3110
3111 if (TREE_CODE (len) == SSA_NAME)
3112 range_type = get_range_info (len, &min, &max);
3113 if (range_type == VR_RANGE)
3114 {
3115 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3116 *min_size = min.to_uhwi ();
3117 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3118 *probable_max_size = *max_size = max.to_uhwi ();
3119 }
3120 else if (range_type == VR_ANTI_RANGE)
3121 {
3122 /* An anti range 0...N lets us determine that the minimal size is N+1. */
3123 if (min == 0)
3124 {
3125 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3126 *min_size = max.to_uhwi () + 1;
3127 }
3128 /* Code like
3129
3130 int n;
3131 if (n < 100)
3132 memcpy (a, b, n)
3133
3134 produces an anti range allowing negative values of N. We can
3135 still use this information to guess that N is not negative.
3136 */
3137 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3138 *probable_max_size = min.to_uhwi () - 1;
3139 }
3140 }
3141 gcc_checking_assert (*max_size <=
3142 (unsigned HOST_WIDE_INT)
3143 GET_MODE_MASK (GET_MODE (len_rtx)));
3144 }
3145
3146 /* Helper function to do the actual work for expand_builtin_memcpy. */
3147
3148 static rtx
3149 expand_builtin_memcpy_args (tree dest, tree src, tree len, rtx target, tree exp)
3150 {
3151 const char *src_str;
3152 unsigned int src_align = get_pointer_alignment (src);
3153 unsigned int dest_align = get_pointer_alignment (dest);
3154 rtx dest_mem, src_mem, dest_addr, len_rtx;
3155 HOST_WIDE_INT expected_size = -1;
3156 unsigned int expected_align = 0;
3157 unsigned HOST_WIDE_INT min_size;
3158 unsigned HOST_WIDE_INT max_size;
3159 unsigned HOST_WIDE_INT probable_max_size;
3160
3161 /* If DEST is not a pointer type, call the normal function. */
3162 if (dest_align == 0)
3163 return NULL_RTX;
3164
3165 /* If SRC is not a pointer type, don't do this
3166 operation in-line. */
3167 if (src_align == 0)
3168 return NULL_RTX;
3169
3170 if (currently_expanding_gimple_stmt)
3171 stringop_block_profile (currently_expanding_gimple_stmt,
3172 &expected_align, &expected_size);
3173
3174 if (expected_align < dest_align)
3175 expected_align = dest_align;
3176 dest_mem = get_memory_rtx (dest, len);
3177 set_mem_align (dest_mem, dest_align);
3178 len_rtx = expand_normal (len);
3179 determine_block_size (len, len_rtx, &min_size, &max_size,
3180 &probable_max_size);
3181 src_str = c_getstr (src);
3182
3183 /* If SRC is a string constant and block move would be done
3184 by pieces, we can avoid loading the string from memory
3185 and only store the computed constants. */
3186 if (src_str
3187 && CONST_INT_P (len_rtx)
3188 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3189 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3190 CONST_CAST (char *, src_str),
3191 dest_align, false))
3192 {
3193 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3194 builtin_memcpy_read_str,
3195 CONST_CAST (char *, src_str),
3196 dest_align, false, 0);
3197 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3198 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3199 return dest_mem;
3200 }
3201
3202 src_mem = get_memory_rtx (src, len);
3203 set_mem_align (src_mem, src_align);
3204
3205 /* Copy word part most expediently. */
3206 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3207 CALL_EXPR_TAILCALL (exp)
3208 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3209 expected_align, expected_size,
3210 min_size, max_size, probable_max_size);
3211
3212 if (dest_addr == 0)
3213 {
3214 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3215 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3216 }
3217
3218 return dest_addr;
3219 }
3220
3221 /* Expand a call EXP to the memcpy builtin.
3222 Return NULL_RTX if we failed; the caller should emit a normal call,
3223 otherwise try to get the result in TARGET, if convenient (and in
3224 mode MODE if that's convenient). */
3225
3226 static rtx
3227 expand_builtin_memcpy (tree exp, rtx target)
3228 {
3229 if (!validate_arglist (exp,
3230 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3231 return NULL_RTX;
3232 else
3233 {
3234 tree dest = CALL_EXPR_ARG (exp, 0);
3235 tree src = CALL_EXPR_ARG (exp, 1);
3236 tree len = CALL_EXPR_ARG (exp, 2);
3237 return expand_builtin_memcpy_args (dest, src, len, target, exp);
3238 }
3239 }
3240
3241 /* Expand an instrumented call EXP to the memcpy builtin.
3242 Return NULL_RTX if we failed; the caller should emit a normal call,
3243 otherwise try to get the result in TARGET, if convenient (and in
3244 mode MODE if that's convenient). */
3245
3246 static rtx
3247 expand_builtin_memcpy_with_bounds (tree exp, rtx target)
3248 {
3249 if (!validate_arglist (exp,
3250 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3251 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3252 INTEGER_TYPE, VOID_TYPE))
3253 return NULL_RTX;
3254 else
3255 {
3256 tree dest = CALL_EXPR_ARG (exp, 0);
3257 tree src = CALL_EXPR_ARG (exp, 2);
3258 tree len = CALL_EXPR_ARG (exp, 4);
3259 rtx res = expand_builtin_memcpy_args (dest, src, len, target, exp);
3260
3261 /* Return src bounds with the result. */
3262 if (res)
3263 {
3264 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3265 expand_normal (CALL_EXPR_ARG (exp, 1)));
3266 res = chkp_join_splitted_slot (res, bnd);
3267 }
3268 return res;
3269 }
3270 }
3271
3272 /* Expand a call EXP to the mempcpy builtin.
3273 Return NULL_RTX if we failed; the caller should emit a normal call,
3274 otherwise try to get the result in TARGET, if convenient (and in
3275 mode MODE if that's convenient). If ENDP is 0 return the
3276 destination pointer, if ENDP is 1 return the end pointer ala
3277 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3278 stpcpy. */
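/* For example, when LEN bytes are copied to DEST, ENDP == 0 makes the
   expansion return DEST, ENDP == 1 returns DEST + LEN (mempcpy), and
   ENDP == 2 returns DEST + LEN - 1 (stpcpy). */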
3279
3280 static rtx
3281 expand_builtin_mempcpy (tree exp, rtx target, machine_mode mode)
3282 {
3283 if (!validate_arglist (exp,
3284 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3285 return NULL_RTX;
3286 else
3287 {
3288 tree dest = CALL_EXPR_ARG (exp, 0);
3289 tree src = CALL_EXPR_ARG (exp, 1);
3290 tree len = CALL_EXPR_ARG (exp, 2);
3291 return expand_builtin_mempcpy_args (dest, src, len,
3292 target, mode, /*endp=*/ 1,
3293 exp);
3294 }
3295 }
3296
3297 /* Expand an instrumented call EXP to the mempcpy builtin.
3298 Return NULL_RTX if we failed; the caller should emit a normal call,
3299 otherwise try to get the result in TARGET, if convenient (and in
3300 mode MODE if that's convenient). */
3301
3302 static rtx
3303 expand_builtin_mempcpy_with_bounds (tree exp, rtx target, machine_mode mode)
3304 {
3305 if (!validate_arglist (exp,
3306 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3307 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3308 INTEGER_TYPE, VOID_TYPE))
3309 return NULL_RTX;
3310 else
3311 {
3312 tree dest = CALL_EXPR_ARG (exp, 0);
3313 tree src = CALL_EXPR_ARG (exp, 2);
3314 tree len = CALL_EXPR_ARG (exp, 4);
3315 rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
3316 mode, 1, exp);
3317
3318 /* Return src bounds with the result. */
3319 if (res)
3320 {
3321 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3322 expand_normal (CALL_EXPR_ARG (exp, 1)));
3323 res = chkp_join_splitted_slot (res, bnd);
3324 }
3325 return res;
3326 }
3327 }
3328
3329 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3330 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3331 so that this can also be called without constructing an actual CALL_EXPR.
3332 The other arguments and return value are the same as for
3333 expand_builtin_mempcpy. */
3334
3335 static rtx
3336 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3337 rtx target, machine_mode mode, int endp,
3338 tree orig_exp)
3339 {
3340 tree fndecl = get_callee_fndecl (orig_exp);
3341
3342 /* If the return value is ignored, transform mempcpy into memcpy. */
3343 if (target == const0_rtx
3344 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
3345 && builtin_decl_implicit_p (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP))
3346 {
3347 tree fn = builtin_decl_implicit (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP);
3348 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3349 dest, src, len);
3350 return expand_expr (result, target, mode, EXPAND_NORMAL);
3351 }
3352 else if (target == const0_rtx
3353 && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3354 {
3355 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3356 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3357 dest, src, len);
3358 return expand_expr (result, target, mode, EXPAND_NORMAL);
3359 }
3360 else
3361 {
3362 const char *src_str;
3363 unsigned int src_align = get_pointer_alignment (src);
3364 unsigned int dest_align = get_pointer_alignment (dest);
3365 rtx dest_mem, src_mem, len_rtx;
3366
3367 /* If either SRC or DEST is not a pointer type, don't do this
3368 operation in-line. */
3369 if (dest_align == 0 || src_align == 0)
3370 return NULL_RTX;
3371
3372 /* If LEN is not constant, call the normal function. */
3373 if (! tree_fits_uhwi_p (len))
3374 return NULL_RTX;
3375
3376 len_rtx = expand_normal (len);
3377 src_str = c_getstr (src);
3378
3379 /* If SRC is a string constant and the block move would be done
3380 by pieces, we can avoid loading the string from memory
3381 and need only store the computed constants. */
3382 if (src_str
3383 && CONST_INT_P (len_rtx)
3384 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3385 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3386 CONST_CAST (char *, src_str),
3387 dest_align, false))
3388 {
3389 dest_mem = get_memory_rtx (dest, len);
3390 set_mem_align (dest_mem, dest_align);
3391 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3392 builtin_memcpy_read_str,
3393 CONST_CAST (char *, src_str),
3394 dest_align, false, endp);
3395 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3396 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3397 return dest_mem;
3398 }
3399
3400 if (CONST_INT_P (len_rtx)
3401 && can_move_by_pieces (INTVAL (len_rtx),
3402 MIN (dest_align, src_align)))
3403 {
3404 dest_mem = get_memory_rtx (dest, len);
3405 set_mem_align (dest_mem, dest_align);
3406 src_mem = get_memory_rtx (src, len);
3407 set_mem_align (src_mem, src_align);
3408 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3409 MIN (dest_align, src_align), endp);
3410 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3411 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3412 return dest_mem;
3413 }
3414
3415 return NULL_RTX;
3416 }
3417 }
3418
3419 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3420 we failed; the caller should emit a normal call, otherwise try to
3421 get the result in TARGET, if convenient. If ENDP is 0 return the
3422 destination pointer, if ENDP is 1 return the end pointer ala
3423 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3424 stpcpy. */
3425
3426 static rtx
3427 expand_movstr (tree dest, tree src, rtx target, int endp)
3428 {
3429 struct expand_operand ops[3];
3430 rtx dest_mem;
3431 rtx src_mem;
3432
3433 if (!targetm.have_movstr ())
3434 return NULL_RTX;
3435
3436 dest_mem = get_memory_rtx (dest, NULL);
3437 src_mem = get_memory_rtx (src, NULL);
3438 if (!endp)
3439 {
3440 target = force_reg (Pmode, XEXP (dest_mem, 0));
3441 dest_mem = replace_equiv_address (dest_mem, target);
3442 }
3443
3444 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3445 create_fixed_operand (&ops[1], dest_mem);
3446 create_fixed_operand (&ops[2], src_mem);
3447 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3448 return NULL_RTX;
3449
3450 if (endp && target != const0_rtx)
3451 {
3452 target = ops[0].value;
3453 /* movstr is supposed to set end to the address of the NUL
3454 terminator. If the caller requested a mempcpy-like return value,
3455 adjust it. */
3456 if (endp == 1)
3457 {
3458 rtx tem = plus_constant (GET_MODE (target),
3459 gen_lowpart (GET_MODE (target), target), 1);
3460 emit_move_insn (target, force_operand (tem, NULL_RTX));
3461 }
3462 }
3463 return target;
3464 }
3465
3466 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3467 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3468 try to get the result in TARGET, if convenient (and in mode MODE if that's
3469 convenient). */
3470
3471 static rtx
3472 expand_builtin_strcpy (tree exp, rtx target)
3473 {
3474 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3475 {
3476 tree dest = CALL_EXPR_ARG (exp, 0);
3477 tree src = CALL_EXPR_ARG (exp, 1);
3478 return expand_builtin_strcpy_args (dest, src, target);
3479 }
3480 return NULL_RTX;
3481 }
3482
3483 /* Helper function to do the actual work for expand_builtin_strcpy. The
3484 arguments to the builtin_strcpy call DEST and SRC are broken out
3485 so that this can also be called without constructing an actual CALL_EXPR.
3486 The other arguments and return value are the same as for
3487 expand_builtin_strcpy. */
3488
3489 static rtx
3490 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3491 {
3492 return expand_movstr (dest, src, target, /*endp=*/0);
3493 }
3494
3495 /* Expand a call EXP to the stpcpy builtin.
3496 Return NULL_RTX if we failed; the caller should emit a normal call,
3497 otherwise try to get the result in TARGET, if convenient (and in
3498 mode MODE if that's convenient). */
3499
3500 static rtx
3501 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3502 {
3503 tree dst, src;
3504 location_t loc = EXPR_LOCATION (exp);
3505
3506 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3507 return NULL_RTX;
3508
3509 dst = CALL_EXPR_ARG (exp, 0);
3510 src = CALL_EXPR_ARG (exp, 1);
3511
3512 /* If the return value is ignored, transform stpcpy into strcpy. */
3513 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3514 {
3515 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3516 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3517 return expand_expr (result, target, mode, EXPAND_NORMAL);
3518 }
3519 else
3520 {
3521 tree len, lenp1;
3522 rtx ret;
3523
3524 /* Ensure we get an actual string whose length can be evaluated at
3525 compile-time, not an expression containing a string. This is
3526 because the latter will potentially produce pessimized code
3527 when used to produce the return value. */
3528 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3529 return expand_movstr (dst, src, target, /*endp=*/2);
3530
3531 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3532 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3533 target, mode, /*endp=*/2,
3534 exp);
3535
3536 if (ret)
3537 return ret;
3538
3539 if (TREE_CODE (len) == INTEGER_CST)
3540 {
3541 rtx len_rtx = expand_normal (len);
3542
3543 if (CONST_INT_P (len_rtx))
3544 {
3545 ret = expand_builtin_strcpy_args (dst, src, target);
3546
3547 if (ret)
3548 {
3549 if (! target)
3550 {
3551 if (mode != VOIDmode)
3552 target = gen_reg_rtx (mode);
3553 else
3554 target = gen_reg_rtx (GET_MODE (ret));
3555 }
3556 if (GET_MODE (target) != GET_MODE (ret))
3557 ret = gen_lowpart (GET_MODE (target), ret);
3558
3559 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3560 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3561 gcc_assert (ret);
3562
3563 return target;
3564 }
3565 }
3566 }
3567
3568 return expand_movstr (dst, src, target, /*endp=*/2);
3569 }
3570 }
3571
3572 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3573 bytes from constant string DATA + OFFSET and return it as a target
3574 constant. */
3575
3576 rtx
3577 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3578 machine_mode mode)
3579 {
3580 const char *str = (const char *) data;
3581
3582 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3583 return const0_rtx;
3584
3585 return c_readstr (str + offset, mode);
3586 }
3587
3588 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3589 NULL_RTX if we failed; the caller should emit a normal call. */
3590
3591 static rtx
3592 expand_builtin_strncpy (tree exp, rtx target)
3593 {
3594 location_t loc = EXPR_LOCATION (exp);
3595
3596 if (validate_arglist (exp,
3597 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3598 {
3599 tree dest = CALL_EXPR_ARG (exp, 0);
3600 tree src = CALL_EXPR_ARG (exp, 1);
3601 tree len = CALL_EXPR_ARG (exp, 2);
3602 tree slen = c_strlen (src, 1);
3603
3604 /* We must be passed constant LEN and SRC parameters. */
3605 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3606 return NULL_RTX;
3607
3608 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3609
3610 /* We're required to pad with trailing zeros if the requested
3611 LEN is greater than strlen(SRC)+1. In that case try to
3612 use store_by_pieces; if that fails, punt. */
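/* E.g. (illustrative) strncpy (d, "ab", 8) must store "ab" followed
   by six NUL bytes; builtin_strncpy_read_str supplies the zero
   padding once OFFSET moves past the end of the string. */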
3613 if (tree_int_cst_lt (slen, len))
3614 {
3615 unsigned int dest_align = get_pointer_alignment (dest);
3616 const char *p = c_getstr (src);
3617 rtx dest_mem;
3618
3619 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3620 || !can_store_by_pieces (tree_to_uhwi (len),
3621 builtin_strncpy_read_str,
3622 CONST_CAST (char *, p),
3623 dest_align, false))
3624 return NULL_RTX;
3625
3626 dest_mem = get_memory_rtx (dest, len);
3627 store_by_pieces (dest_mem, tree_to_uhwi (len),
3628 builtin_strncpy_read_str,
3629 CONST_CAST (char *, p), dest_align, false, 0);
3630 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3631 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3632 return dest_mem;
3633 }
3634 }
3635 return NULL_RTX;
3636 }
3637
3638 /* Callback routine for store_by_pieces. DATA points to the fill
3639 byte; return a target constant made of GET_MODE_SIZE (MODE) copies
3640 of that byte (OFFSET is ignored). */
3641
3642 rtx
3643 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3644 machine_mode mode)
3645 {
3646 const char *c = (const char *) data;
3647 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3648
3649 memset (p, *c, GET_MODE_SIZE (mode));
3650
3651 return c_readstr (p, mode);
3652 }
3653
3654 /* Callback routine for store_by_pieces. Return the RTL of a register
3655 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3656 char value given in the RTL register data. For example, if mode is
3657 4 bytes wide, return the RTL for 0x01010101*data. */
3658
3659 static rtx
3660 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3661 machine_mode mode)
3662 {
3663 rtx target, coeff;
3664 size_t size;
3665 char *p;
3666
3667 size = GET_MODE_SIZE (mode);
3668 if (size == 1)
3669 return (rtx) data;
3670
3671 p = XALLOCAVEC (char, size);
3672 memset (p, 1, size);
3673 coeff = c_readstr (p, mode);
3674
3675 target = convert_to_mode (mode, (rtx) data, 1);
3676 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3677 return force_reg (mode, target);
3678 }
3679
3680 /* Expand expression EXP, which is a call to the memset builtin. Return
3681 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3682 try to get the result in TARGET, if convenient (and in mode MODE if that's
3683 convenient). */
3684
3685 static rtx
3686 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
3687 {
3688 if (!validate_arglist (exp,
3689 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3690 return NULL_RTX;
3691 else
3692 {
3693 tree dest = CALL_EXPR_ARG (exp, 0);
3694 tree val = CALL_EXPR_ARG (exp, 1);
3695 tree len = CALL_EXPR_ARG (exp, 2);
3696 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3697 }
3698 }
3699
3700 /* Expand expression EXP, which is an instrumented call to the memset builtin.
3701 Return NULL_RTX if we failed; the caller should emit a normal call, otherwise
3702 try to get the result in TARGET, if convenient (and in mode MODE if that's
3703 convenient). */
3704
3705 static rtx
3706 expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
3707 {
3708 if (!validate_arglist (exp,
3709 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3710 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3711 return NULL_RTX;
3712 else
3713 {
3714 tree dest = CALL_EXPR_ARG (exp, 0);
3715 tree val = CALL_EXPR_ARG (exp, 2);
3716 tree len = CALL_EXPR_ARG (exp, 3);
3717 rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
3718
3719 /* Return DEST bounds with the result. */
3720 if (res)
3721 {
3722 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3723 expand_normal (CALL_EXPR_ARG (exp, 1)));
3724 res = chkp_join_splitted_slot (res, bnd);
3725 }
3726 return res;
3727 }
3728 }
3729
3730 /* Helper function to do the actual work for expand_builtin_memset. The
3731 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3732 so that this can also be called without constructing an actual CALL_EXPR.
3733 The other arguments and return value are the same as for
3734 expand_builtin_memset. */
3735
3736 static rtx
3737 expand_builtin_memset_args (tree dest, tree val, tree len,
3738 rtx target, machine_mode mode, tree orig_exp)
3739 {
3740 tree fndecl, fn;
3741 enum built_in_function fcode;
3742 machine_mode val_mode;
3743 char c;
3744 unsigned int dest_align;
3745 rtx dest_mem, dest_addr, len_rtx;
3746 HOST_WIDE_INT expected_size = -1;
3747 unsigned int expected_align = 0;
3748 unsigned HOST_WIDE_INT min_size;
3749 unsigned HOST_WIDE_INT max_size;
3750 unsigned HOST_WIDE_INT probable_max_size;
3751
3752 dest_align = get_pointer_alignment (dest);
3753
3754 /* If DEST is not a pointer type, don't do this operation in-line. */
3755 if (dest_align == 0)
3756 return NULL_RTX;
3757
3758 if (currently_expanding_gimple_stmt)
3759 stringop_block_profile (currently_expanding_gimple_stmt,
3760 &expected_align, &expected_size);
3761
3762 if (expected_align < dest_align)
3763 expected_align = dest_align;
3764
3765 /* If the LEN parameter is zero, return DEST. */
3766 if (integer_zerop (len))
3767 {
3768 /* Evaluate and ignore VAL in case it has side-effects. */
3769 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3770 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3771 }
3772
3773 /* Stabilize the arguments in case we fail. */
3774 dest = builtin_save_expr (dest);
3775 val = builtin_save_expr (val);
3776 len = builtin_save_expr (len);
3777
3778 len_rtx = expand_normal (len);
3779 determine_block_size (len, len_rtx, &min_size, &max_size,
3780 &probable_max_size);
3781 dest_mem = get_memory_rtx (dest, len);
3782 val_mode = TYPE_MODE (unsigned_char_type_node);
3783
3784 if (TREE_CODE (val) != INTEGER_CST)
3785 {
3786 rtx val_rtx;
3787
3788 val_rtx = expand_normal (val);
3789 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3790
3791 /* Assume that we can memset by pieces if we can store
3792 the coefficients by pieces (in the required modes).
3793 We can't pass builtin_memset_gen_str as that emits RTL. */
3794 c = 1;
3795 if (tree_fits_uhwi_p (len)
3796 && can_store_by_pieces (tree_to_uhwi (len),
3797 builtin_memset_read_str, &c, dest_align,
3798 true))
3799 {
3800 val_rtx = force_reg (val_mode, val_rtx);
3801 store_by_pieces (dest_mem, tree_to_uhwi (len),
3802 builtin_memset_gen_str, val_rtx, dest_align,
3803 true, 0);
3804 }
3805 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3806 dest_align, expected_align,
3807 expected_size, min_size, max_size,
3808 probable_max_size))
3809 goto do_libcall;
3810
3811 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3812 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3813 return dest_mem;
3814 }
3815
3816 if (target_char_cast (val, &c))
3817 goto do_libcall;
3818
3819 if (c)
3820 {
3821 if (tree_fits_uhwi_p (len)
3822 && can_store_by_pieces (tree_to_uhwi (len),
3823 builtin_memset_read_str, &c, dest_align,
3824 true))
3825 store_by_pieces (dest_mem, tree_to_uhwi (len),
3826 builtin_memset_read_str, &c, dest_align, true, 0);
3827 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3828 gen_int_mode (c, val_mode),
3829 dest_align, expected_align,
3830 expected_size, min_size, max_size,
3831 probable_max_size))
3832 goto do_libcall;
3833
3834 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3835 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3836 return dest_mem;
3837 }
3838
3839 set_mem_align (dest_mem, dest_align);
3840 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3841 CALL_EXPR_TAILCALL (orig_exp)
3842 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3843 expected_align, expected_size,
3844 min_size, max_size,
3845 probable_max_size);
3846
3847 if (dest_addr == 0)
3848 {
3849 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3850 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3851 }
3852
3853 return dest_addr;
3854
3855 do_libcall:
3856 fndecl = get_callee_fndecl (orig_exp);
3857 fcode = DECL_FUNCTION_CODE (fndecl);
3858 if (fcode == BUILT_IN_MEMSET
3859 || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
3860 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3861 dest, val, len);
3862 else if (fcode == BUILT_IN_BZERO)
3863 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3864 dest, len);
3865 else
3866 gcc_unreachable ();
3867 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3868 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3869 return expand_call (fn, target, target == const0_rtx);
3870 }
3871
3872 /* Expand expression EXP, which is a call to the bzero builtin. Return
3873 NULL_RTX if we failed; the caller should emit a normal call. */
3874
3875 static rtx
3876 expand_builtin_bzero (tree exp)
3877 {
3878 tree dest, size;
3879 location_t loc = EXPR_LOCATION (exp);
3880
3881 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3882 return NULL_RTX;
3883
3884 dest = CALL_EXPR_ARG (exp, 0);
3885 size = CALL_EXPR_ARG (exp, 1);
3886
3887 /* Build the new argument list, transforming bzero (ptr x, int y)
3888 into memset (ptr x, int 0, size_t y). Passing EXP through unchanged
3889 means that if the call isn't expanded inline, we fall back to
3890 calling bzero instead of memset. */
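/* E.g. (illustrative) bzero (p, n) expands exactly as
   memset (p, 0, (size_t) n) would, with the result discarded
   (const0_rtx as the target). */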
3891
3892 return expand_builtin_memset_args (dest, integer_zero_node,
3893 fold_convert_loc (loc,
3894 size_type_node, size),
3895 const0_rtx, VOIDmode, exp);
3896 }
3897
3898 /* Try to expand cmpstr operation ICODE with the given operands.
3899 Return the result rtx on success, otherwise return null. */
3900
3901 static rtx
3902 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
3903 HOST_WIDE_INT align)
3904 {
3905 machine_mode insn_mode = insn_data[icode].operand[0].mode;
3906
3907 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
3908 target = NULL_RTX;
3909
3910 struct expand_operand ops[4];
3911 create_output_operand (&ops[0], target, insn_mode);
3912 create_fixed_operand (&ops[1], arg1_rtx);
3913 create_fixed_operand (&ops[2], arg2_rtx);
3914 create_integer_operand (&ops[3], align);
3915 if (maybe_expand_insn (icode, 4, ops))
3916 return ops[0].value;
3917 return NULL_RTX;
3918 }
3919
3920 /* Try to expand cmpstrn or cmpmem operation ICODE with the given operands.
3921 ARG3_TYPE is the type of ARG3_RTX. Return the result rtx on success,
3922 otherwise return null. */
3923
3924 static rtx
3925 expand_cmpstrn_or_cmpmem (insn_code icode, rtx target, rtx arg1_rtx,
3926 rtx arg2_rtx, tree arg3_type, rtx arg3_rtx,
3927 HOST_WIDE_INT align)
3928 {
3929 machine_mode insn_mode = insn_data[icode].operand[0].mode;
3930
3931 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
3932 target = NULL_RTX;
3933
3934 struct expand_operand ops[5];
3935 create_output_operand (&ops[0], target, insn_mode);
3936 create_fixed_operand (&ops[1], arg1_rtx);
3937 create_fixed_operand (&ops[2], arg2_rtx);
3938 create_convert_operand_from (&ops[3], arg3_rtx, TYPE_MODE (arg3_type),
3939 TYPE_UNSIGNED (arg3_type));
3940 create_integer_operand (&ops[4], align);
3941 if (maybe_expand_insn (icode, 5, ops))
3942 return ops[0].value;
3943 return NULL_RTX;
3944 }
3945
3946 /* Expand expression EXP, which is a call to the memcmp built-in function.
3947 Return NULL_RTX if we failed and the caller should emit a normal call,
3948 otherwise try to get the result in TARGET, if convenient. */
3949
3950 static rtx
3951 expand_builtin_memcmp (tree exp, rtx target)
3952 {
3953 if (!validate_arglist (exp,
3954 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3955 return NULL_RTX;
3956
3957 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3958 implementing memcmp because it will stop if it encounters two
3959 zero bytes. */
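/* For instance, memcmp ("a\0b", "a\0c", 3) must return nonzero, but
   a cmpstrn-style compare would stop at the embedded NUL in both
   strings and report equality. */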
3960 insn_code icode = direct_optab_handler (cmpmem_optab, SImode);
3961 if (icode == CODE_FOR_nothing)
3962 return NULL_RTX;
3963
3964 tree arg1 = CALL_EXPR_ARG (exp, 0);
3965 tree arg2 = CALL_EXPR_ARG (exp, 1);
3966 tree len = CALL_EXPR_ARG (exp, 2);
3967
3968 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3969 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3970
3971 /* If either argument's alignment is unknown (not really a pointer), call the function. */
3972 if (arg1_align == 0 || arg2_align == 0)
3973 return NULL_RTX;
3974
3975 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
3976 location_t loc = EXPR_LOCATION (exp);
3977 rtx arg1_rtx = get_memory_rtx (arg1, len);
3978 rtx arg2_rtx = get_memory_rtx (arg2, len);
3979 rtx arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3980
3981 /* Set MEM_SIZE as appropriate. */
3982 if (CONST_INT_P (arg3_rtx))
3983 {
3984 set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
3985 set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
3986 }
3987
3988 rtx result = expand_cmpstrn_or_cmpmem (icode, target, arg1_rtx, arg2_rtx,
3989 TREE_TYPE (len), arg3_rtx,
3990 MIN (arg1_align, arg2_align));
3991 if (result)
3992 {
3993 /* Return the value in the proper mode for this function. */
3994 if (GET_MODE (result) == mode)
3995 return result;
3996
3997 if (target != 0)
3998 {
3999 convert_move (target, result, 0);
4000 return target;
4001 }
4002
4003 return convert_to_mode (mode, result, 0);
4004 }
4005
4006 result = target;
4007 if (! (result != 0
4008 && REG_P (result) && GET_MODE (result) == mode
4009 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4010 result = gen_reg_rtx (mode);
4011
4012 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
4013 TYPE_MODE (integer_type_node), 3,
4014 XEXP (arg1_rtx, 0), Pmode,
4015 XEXP (arg2_rtx, 0), Pmode,
4016 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4017 TYPE_UNSIGNED (sizetype)),
4018 TYPE_MODE (sizetype));
4019 return result;
4020 }
4021
4022 /* Expand expression EXP, which is a call to the strcmp builtin.
4023 Return NULL_RTX if we failed; the caller should emit a normal call.
4024 Otherwise try to get the result in TARGET, if convenient. */
4025
4026 static rtx
4027 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4028 {
4029 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4030 return NULL_RTX;
4031
4032 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4033 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4034 if (cmpstr_icode != CODE_FOR_nothing || cmpstrn_icode != CODE_FOR_nothing)
4035 {
4036 rtx arg1_rtx, arg2_rtx;
4037 tree fndecl, fn;
4038 tree arg1 = CALL_EXPR_ARG (exp, 0);
4039 tree arg2 = CALL_EXPR_ARG (exp, 1);
4040 rtx result = NULL_RTX;
4041
4042 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4043 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4044
4045 /* If either argument's alignment is unknown (not really a pointer), call the function. */
4046 if (arg1_align == 0 || arg2_align == 0)
4047 return NULL_RTX;
4048
4049 /* Stabilize the arguments in case the gen_cmpstr(n)si expanders fail. */
4050 arg1 = builtin_save_expr (arg1);
4051 arg2 = builtin_save_expr (arg2);
4052
4053 arg1_rtx = get_memory_rtx (arg1, NULL);
4054 arg2_rtx = get_memory_rtx (arg2, NULL);
4055
4056 /* Try to call cmpstrsi. */
4057 if (cmpstr_icode != CODE_FOR_nothing)
4058 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4059 MIN (arg1_align, arg2_align));
4060
4061 /* Try to determine at least one length and call cmpstrnsi. */
4062 if (!result && cmpstrn_icode != CODE_FOR_nothing)
4063 {
4064 tree len;
4065 rtx arg3_rtx;
4066
4067 tree len1 = c_strlen (arg1, 1);
4068 tree len2 = c_strlen (arg2, 1);
4069
4070 if (len1)
4071 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4072 if (len2)
4073 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4074
4075 /* If we don't have a constant length for the first, use the length
4076 of the second, if we know it. We don't require a constant for
4077 this case; some cost analysis could be done if both are available
4078 but neither is constant. For now, assume they're equally cheap,
4079 unless one has side effects. If both strings have constant lengths,
4080 use the smaller. */
4081
4082 if (!len1)
4083 len = len2;
4084 else if (!len2)
4085 len = len1;
4086 else if (TREE_SIDE_EFFECTS (len1))
4087 len = len2;
4088 else if (TREE_SIDE_EFFECTS (len2))
4089 len = len1;
4090 else if (TREE_CODE (len1) != INTEGER_CST)
4091 len = len2;
4092 else if (TREE_CODE (len2) != INTEGER_CST)
4093 len = len1;
4094 else if (tree_int_cst_lt (len1, len2))
4095 len = len1;
4096 else
4097 len = len2;
4098
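/* E.g. (illustrative) if arg1 is the literal "abc", len1 is 4 and
   len2 may be unknown; comparing just 4 bytes is safe because
   strcmp never compares past the first NUL. */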
4099 /* If both arguments have side effects, we cannot optimize. */
4100 if (len && !TREE_SIDE_EFFECTS (len))
4101 {
4102 arg3_rtx = expand_normal (len);
4103 result = expand_cmpstrn_or_cmpmem
4104 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4105 arg3_rtx, MIN (arg1_align, arg2_align));
4106 }
4107 }
4108
4109 if (result)
4110 {
4111 /* Return the value in the proper mode for this function. */
4112 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4113 if (GET_MODE (result) == mode)
4114 return result;
4115 if (target == 0)
4116 return convert_to_mode (mode, result, 0);
4117 convert_move (target, result, 0);
4118 return target;
4119 }
4120
4121 /* Expand the library call ourselves using a stabilized argument
4122 list to avoid re-evaluating the function's arguments twice. */
4123 fndecl = get_callee_fndecl (exp);
4124 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4125 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4126 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4127 return expand_call (fn, target, target == const0_rtx);
4128 }
4129 return NULL_RTX;
4130 }
4131
4132 /* Expand expression EXP, which is a call to the strncmp builtin.
4133 Return NULL_RTX if we failed; the caller should emit a normal call.
4134 Otherwise try to get the result in TARGET, if convenient. */
4135
4136 static rtx
4137 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4138 ATTRIBUTE_UNUSED machine_mode mode)
4139 {
4140 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4141
4142 if (!validate_arglist (exp,
4143 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4144 return NULL_RTX;
4145
4146 /* If c_strlen can determine an expression for one of the string
4147 lengths, and it doesn't have side effects, then emit cmpstrnsi
4148 using length MIN(strlen(string)+1, arg3). */
4149 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4150 if (cmpstrn_icode != CODE_FOR_nothing)
4151 {
4152 tree len, len1, len2;
4153 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4154 rtx result;
4155 tree fndecl, fn;
4156 tree arg1 = CALL_EXPR_ARG (exp, 0);
4157 tree arg2 = CALL_EXPR_ARG (exp, 1);
4158 tree arg3 = CALL_EXPR_ARG (exp, 2);
4159
4160 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4161 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4162
4163 len1 = c_strlen (arg1, 1);
4164 len2 = c_strlen (arg2, 1);
4165
4166 if (len1)
4167 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4168 if (len2)
4169 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4170
4171 /* If we don't have a constant length for the first, use the length
4172 of the second, if we know it. We don't require a constant for
4173 this case; some cost analysis could be done if both are available
4174 but neither is constant. For now, assume they're equally cheap,
4175 unless one has side effects. If both strings have constant lengths,
4176 use the smaller. */
4177
4178 if (!len1)
4179 len = len2;
4180 else if (!len2)
4181 len = len1;
4182 else if (TREE_SIDE_EFFECTS (len1))
4183 len = len2;
4184 else if (TREE_SIDE_EFFECTS (len2))
4185 len = len1;
4186 else if (TREE_CODE (len1) != INTEGER_CST)
4187 len = len2;
4188 else if (TREE_CODE (len2) != INTEGER_CST)
4189 len = len1;
4190 else if (tree_int_cst_lt (len1, len2))
4191 len = len1;
4192 else
4193 len = len2;
4194
4195 /* If both arguments have side effects, we cannot optimize. */
4196 if (!len || TREE_SIDE_EFFECTS (len))
4197 return NULL_RTX;
4198
4199 /* The actual new length parameter is MIN(len,arg3). */
4200 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4201 fold_convert_loc (loc, TREE_TYPE (len), arg3));
4202
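/* E.g. (illustrative) for strncmp ("abcd", s, 100) we get
   len == MIN (5, 100) == 5, so at most 5 bytes are compared. */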
4203 /* If either argument's alignment is unknown (not really a pointer), call the function. */
4204 if (arg1_align == 0 || arg2_align == 0)
4205 return NULL_RTX;
4206
4207 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4208 arg1 = builtin_save_expr (arg1);
4209 arg2 = builtin_save_expr (arg2);
4210 len = builtin_save_expr (len);
4211
4212 arg1_rtx = get_memory_rtx (arg1, len);
4213 arg2_rtx = get_memory_rtx (arg2, len);
4214 arg3_rtx = expand_normal (len);
4215 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
4216 arg2_rtx, TREE_TYPE (len), arg3_rtx,
4217 MIN (arg1_align, arg2_align));
4218 if (result)
4219 {
4220 /* Return the value in the proper mode for this function. */
4221 mode = TYPE_MODE (TREE_TYPE (exp));
4222 if (GET_MODE (result) == mode)
4223 return result;
4224 if (target == 0)
4225 return convert_to_mode (mode, result, 0);
4226 convert_move (target, result, 0);
4227 return target;
4228 }
4229
4230 /* Expand the library call ourselves using a stabilized argument
4231 list to avoid re-evaluating the function's arguments twice. */
4232 fndecl = get_callee_fndecl (exp);
4233 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4234 arg1, arg2, len);
4235 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4236 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4237 return expand_call (fn, target, target == const0_rtx);
4238 }
4239 return NULL_RTX;
4240 }
4241
4242 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4243 if that's convenient. */
4244
4245 rtx
4246 expand_builtin_saveregs (void)
4247 {
4248 rtx val;
4249 rtx_insn *seq;
4250
4251 /* Don't do __builtin_saveregs more than once in a function.
4252 Save the result of the first call and reuse it. */
4253 if (saveregs_value != 0)
4254 return saveregs_value;
4255
4256 /* When this function is called, it means that registers must be
4257 saved on entry to this function. So we migrate the call to the
4258 first insn of this function. */
4259
4260 start_sequence ();
4261
4262 /* Do whatever the machine needs done in this case. */
4263 val = targetm.calls.expand_builtin_saveregs ();
4264
4265 seq = get_insns ();
4266 end_sequence ();
4267
4268 saveregs_value = val;
4269
4270 /* Put the insns after the NOTE that starts the function. If this
4271 is inside a start_sequence, make the outer-level insn chain current, so
4272 the code is placed at the start of the function. */
4273 push_topmost_sequence ();
4274 emit_insn_after (seq, entry_of_function ());
4275 pop_topmost_sequence ();
4276
4277 return val;
4278 }
4279
4280 /* Expand a call to __builtin_next_arg. */
4281
4282 static rtx
4283 expand_builtin_next_arg (void)
4284 {
4285 /* Checking arguments is already done in fold_builtin_next_arg
4286 that must be called before this function. */
4287 return expand_binop (ptr_mode, add_optab,
4288 crtl->args.internal_arg_pointer,
4289 crtl->args.arg_offset_rtx,
4290 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4291 }
4292
4293 /* Make it easier for the backends by protecting the valist argument
4294 from multiple evaluations. */
4295
4296 static tree
4297 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4298 {
4299 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4300
4301 /* The current way of determining the type of valist is completely
4302 bogus. We should have the information on the va builtin instead. */
4303 if (!vatype)
4304 vatype = targetm.fn_abi_va_list (cfun->decl);
4305
4306 if (TREE_CODE (vatype) == ARRAY_TYPE)
4307 {
4308 if (TREE_SIDE_EFFECTS (valist))
4309 valist = save_expr (valist);
4310
4311 /* For this case, the backends will be expecting a pointer to
4312 vatype, but it's possible we've actually been given an array
4313 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4314 So fix it. */
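/* For instance (illustrative), on targets whose va_list is a
   one-element array of records, VALIST may arrive here either as
   the array itself or already decayed to a pointer; the ADDR_EXPR
   below normalizes the array case. */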
4315 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4316 {
4317 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4318 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4319 }
4320 }
4321 else
4322 {
4323 tree pt = build_pointer_type (vatype);
4324
4325 if (! needs_lvalue)
4326 {
4327 if (! TREE_SIDE_EFFECTS (valist))
4328 return valist;
4329
4330 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4331 TREE_SIDE_EFFECTS (valist) = 1;
4332 }
4333
4334 if (TREE_SIDE_EFFECTS (valist))
4335 valist = save_expr (valist);
4336 valist = fold_build2_loc (loc, MEM_REF,
4337 vatype, valist, build_int_cst (pt, 0));
4338 }
4339
4340 return valist;
4341 }
4342
4343 /* The "standard" definition of va_list is void*. */
4344
4345 tree
4346 std_build_builtin_va_list (void)
4347 {
4348 return ptr_type_node;
4349 }
4350
4351 /* The "standard" abi va_list is va_list_type_node. */
4352
4353 tree
4354 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4355 {
4356 return va_list_type_node;
4357 }
4358
4359 /* The "standard" type of va_list is va_list_type_node. */
4360
4361 tree
4362 std_canonical_va_list_type (tree type)
4363 {
4364 tree wtype, htype;
4365
4366 if (INDIRECT_REF_P (type))
4367 type = TREE_TYPE (type);
4368 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
4369 type = TREE_TYPE (type);
4370 wtype = va_list_type_node;
4371 htype = type;
4372 /* Handle structure va_list types. */
4373 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4374 htype = TREE_TYPE (htype);
4375 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4376 {
4377 /* If va_list is an array type, the argument may have decayed
4378 to a pointer type, e.g. by being passed to another function.
4379 In that case, unwrap both types so that we can compare the
4380 underlying records. */
4381 if (TREE_CODE (htype) == ARRAY_TYPE
4382 || POINTER_TYPE_P (htype))
4383 {
4384 wtype = TREE_TYPE (wtype);
4385 htype = TREE_TYPE (htype);
4386 }
4387 }
4388 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4389 return va_list_type_node;
4390
4391 return NULL_TREE;
4392 }
4393
4394 /* The "standard" implementation of va_start: just assign `nextarg' to
4395 the variable. */
4396
4397 void
4398 std_expand_builtin_va_start (tree valist, rtx nextarg)
4399 {
4400 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4401 convert_move (va_r, nextarg, 0);
4402
4403 /* We do not have any valid bounds for the pointer, so
4404 just store zero bounds for it. */
4405 if (chkp_function_instrumented_p (current_function_decl))
4406 chkp_expand_bounds_reset_for_mem (valist,
4407 make_tree (TREE_TYPE (valist),
4408 nextarg));
4409 }
4410
4411 /* Expand EXP, a call to __builtin_va_start. */
4412
4413 static rtx
4414 expand_builtin_va_start (tree exp)
4415 {
4416 rtx nextarg;
4417 tree valist;
4418 location_t loc = EXPR_LOCATION (exp);
4419
4420 if (call_expr_nargs (exp) < 2)
4421 {
4422 error_at (loc, "too few arguments to function %<va_start%>");
4423 return const0_rtx;
4424 }
4425
4426 if (fold_builtin_next_arg (exp, true))
4427 return const0_rtx;
4428
4429 nextarg = expand_builtin_next_arg ();
4430 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4431
4432 if (targetm.expand_builtin_va_start)
4433 targetm.expand_builtin_va_start (valist, nextarg);
4434 else
4435 std_expand_builtin_va_start (valist, nextarg);
4436
4437 return const0_rtx;
4438 }
4439
4440 /* Expand EXP, a call to __builtin_va_end. */
4441
4442 static rtx
4443 expand_builtin_va_end (tree exp)
4444 {
4445 tree valist = CALL_EXPR_ARG (exp, 0);
4446
4447 /* Evaluate for side effects, if needed. I hate macros that don't
4448 do that. */
4449 if (TREE_SIDE_EFFECTS (valist))
4450 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4451
4452 return const0_rtx;
4453 }
4454
4455 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4456 builtin rather than just as an assignment in stdarg.h because of the
4457 nastiness of array-type va_list types. */
4458
4459 static rtx
4460 expand_builtin_va_copy (tree exp)
4461 {
4462 tree dst, src, t;
4463 location_t loc = EXPR_LOCATION (exp);
4464
4465 dst = CALL_EXPR_ARG (exp, 0);
4466 src = CALL_EXPR_ARG (exp, 1);
4467
4468 dst = stabilize_va_list_loc (loc, dst, 1);
4469 src = stabilize_va_list_loc (loc, src, 0);
4470
4471 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4472
4473 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4474 {
4475 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4476 TREE_SIDE_EFFECTS (t) = 1;
4477 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4478 }
4479 else
4480 {
4481 rtx dstb, srcb, size;
4482
4483 /* Evaluate to pointers. */
4484 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4485 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4486 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4487 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4488
4489 dstb = convert_memory_address (Pmode, dstb);
4490 srcb = convert_memory_address (Pmode, srcb);
4491
4492 /* "Dereference" to BLKmode memories. */
4493 dstb = gen_rtx_MEM (BLKmode, dstb);
4494 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4495 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4496 srcb = gen_rtx_MEM (BLKmode, srcb);
4497 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4498 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4499
4500 /* Copy. */
4501 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4502 }
4503
4504 return const0_rtx;
4505 }
4506
4507 /* Expand a call to one of the builtin functions __builtin_frame_address or
4508 __builtin_return_address. */
4509
4510 static rtx
4511 expand_builtin_frame_address (tree fndecl, tree exp)
4512 {
4513 /* The argument must be a nonnegative integer constant.
4514 It counts the number of frames to scan up the stack.
4515 The value is either the frame pointer value or the return
4516 address saved in that frame. */
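/* E.g. (illustrative) __builtin_return_address (0) yields the
   current function's return address and __builtin_frame_address (0)
   its frame; any nonzero count triggers the -Wframe-address
   warning below. */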
4517 if (call_expr_nargs (exp) == 0)
4518 /* Warning about missing arg was already issued. */
4519 return const0_rtx;
4520 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4521 {
4522 error ("invalid argument to %qD", fndecl);
4523 return const0_rtx;
4524 }
4525 else
4526 {
4527 /* Number of frames to scan up the stack. */
4528 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
4529
4530 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
4531
4532 /* Some ports cannot access arbitrary stack frames. */
4533 if (tem == NULL)
4534 {
4535 warning (0, "unsupported argument to %qD", fndecl);
4536 return const0_rtx;
4537 }
4538
4539 if (count)
4540 {
4541 /* Warn since no effort is made to ensure that any frame
4542 beyond the current one exists or can be safely reached. */
4543 warning (OPT_Wframe_address, "calling %qD with "
4544 "a nonzero argument is unsafe", fndecl);
4545 }
4546
4547 /* For __builtin_frame_address, return what we've got. */
4548 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4549 return tem;
4550
4551 if (!REG_P (tem)
4552 && ! CONSTANT_P (tem))
4553 tem = copy_addr_to_reg (tem);
4554 return tem;
4555 }
4556 }
4557
4558 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4559 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4560 is the same as for allocate_dynamic_stack_space. */
4561
4562 static rtx
4563 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4564 {
4565 rtx op0;
4566 rtx result;
4567 bool valid_arglist;
4568 unsigned int align;
4569 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4570 == BUILT_IN_ALLOCA_WITH_ALIGN);
4571
4572 valid_arglist
4573 = (alloca_with_align
4574 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4575 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4576
4577 if (!valid_arglist)
4578 return NULL_RTX;
4579
4580 /* Compute the argument. */
4581 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4582
4583 /* Compute the alignment. */
4584 align = (alloca_with_align
4585 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4586 : BIGGEST_ALIGNMENT);
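/* Note (illustrative): the alignment argument of
   __builtin_alloca_with_align is expressed in bits, so e.g.
   __builtin_alloca_with_align (n, 256) requests 32-byte alignment. */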
4587
4588 /* Allocate the desired space. */
4589 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4590 result = convert_memory_address (ptr_mode, result);
4591
4592 return result;
4593 }
4594
4595 /* Expand a call to a bswap builtin in EXP.
4596 Return NULL_RTX if a normal call should be emitted rather than expanding the
4597 function in-line. If convenient, the result should be placed in TARGET.
4598 SUBTARGET may be used as the target for computing one of EXP's operands. */
4599
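/* For example (illustrative), __builtin_bswap32 (0x12345678)
   evaluates to 0x78563412; the expansion below simply funnels the
   argument into bswap_optab in TARGET_MODE. */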
4600 static rtx
4601 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
4602 rtx subtarget)
4603 {
4604 tree arg;
4605 rtx op0;
4606
4607 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4608 return NULL_RTX;
4609
4610 arg = CALL_EXPR_ARG (exp, 0);
4611 op0 = expand_expr (arg,
4612 subtarget && GET_MODE (subtarget) == target_mode
4613 ? subtarget : NULL_RTX,
4614 target_mode, EXPAND_NORMAL);
4615 if (GET_MODE (op0) != target_mode)
4616 op0 = convert_to_mode (target_mode, op0, 1);
4617
4618 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4619
4620 gcc_assert (target);
4621
4622 return convert_to_mode (target_mode, target, 1);
4623 }
4624
4625 /* Expand a call to a unary builtin in EXP.
4626 Return NULL_RTX if a normal call should be emitted rather than expanding the
4627 function in-line. If convenient, the result should be placed in TARGET.
4628 SUBTARGET may be used as the target for computing one of EXP's operands. */
4629
4630 static rtx
4631 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
4632 rtx subtarget, optab op_optab)
4633 {
4634 rtx op0;
4635
4636 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4637 return NULL_RTX;
4638
4639 /* Compute the argument. */
4640 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4641 (subtarget
4642 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4643 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4644 VOIDmode, EXPAND_NORMAL);
4645 /* Compute op, into TARGET if possible.
4646 Set TARGET to wherever the result comes back. */
4647 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4648 op_optab, op0, target, op_optab != clrsb_optab);
4649 gcc_assert (target);
4650
4651 return convert_to_mode (target_mode, target, 0);
4652 }
4653
4654 /* Expand a call to __builtin_expect. We just return our argument,
4655 as the builtin_expect semantics should already have been handled
4656 by the tree branch prediction pass. */
4657
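/* E.g. in `if (__builtin_expect (ptr != NULL, 1))' the hint has
   already been consumed by branch prediction; all that is left to
   expand here is PTR != NULL itself. */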
4658 static rtx
4659 expand_builtin_expect (tree exp, rtx target)
4660 {
4661 tree arg;
4662
4663 if (call_expr_nargs (exp) < 2)
4664 return const0_rtx;
4665 arg = CALL_EXPR_ARG (exp, 0);
4666
4667 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4668 /* When guessing was done, the hints should be already stripped away. */
4669 gcc_assert (!flag_guess_branch_prob
4670 || optimize == 0 || seen_error ());
4671 return target;
4672 }
4673
4674 /* Expand a call to __builtin_assume_aligned. We just return our first
4675 argument, as the builtin_assume_aligned semantics should already
4676 have been handled by CCP. */
4677
4678 static rtx
4679 expand_builtin_assume_aligned (tree exp, rtx target)
4680 {
4681 if (call_expr_nargs (exp) < 2)
4682 return const0_rtx;
4683 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4684 EXPAND_NORMAL);
4685 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4686 && (call_expr_nargs (exp) < 3
4687 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4688 return target;
4689 }
4690
4691 void
4692 expand_builtin_trap (void)
4693 {
4694 if (targetm.have_trap ())
4695 {
4696 rtx_insn *insn = emit_insn (targetm.gen_trap ());
4697 /* For trap insns when not accumulating outgoing args force
4698 REG_ARGS_SIZE note to prevent crossjumping of calls with
4699 different args sizes. */
4700 if (!ACCUMULATE_OUTGOING_ARGS)
4701 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4702 }
4703 else
4704 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4705 emit_barrier ();
4706 }
4707
4708 /* Expand a call to __builtin_unreachable. We do nothing except emit
4709 a barrier saying that control flow will not pass here.
4710
4711 It is the responsibility of the program being compiled to ensure
4712 that control flow never reaches __builtin_unreachable. */
4713 static void
4714 expand_builtin_unreachable (void)
4715 {
4716 emit_barrier ();
4717 }
4718
4719 /* Expand EXP, a call to fabs, fabsf or fabsl.
4720 Return NULL_RTX if a normal call should be emitted rather than expanding
4721 the function inline. If convenient, the result should be placed
4722 in TARGET. SUBTARGET may be used as the target for computing
4723 the operand. */
4724
4725 static rtx
4726 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4727 {
4728 machine_mode mode;
4729 tree arg;
4730 rtx op0;
4731
4732 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4733 return NULL_RTX;
4734
4735 arg = CALL_EXPR_ARG (exp, 0);
4736 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4737 mode = TYPE_MODE (TREE_TYPE (arg));
4738 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4739 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4740 }
4741
4742 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4743 Return NULL_RTX if a normal call should be emitted rather than expanding the
4744 function inline. If convenient, the result should be placed in TARGET.
4745 SUBTARGET may be used as the target for computing the operand. */
4746
4747 static rtx
4748 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4749 {
4750 rtx op0, op1;
4751 tree arg;
4752
4753 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4754 return NULL_RTX;
4755
4756 arg = CALL_EXPR_ARG (exp, 0);
4757 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4758
4759 arg = CALL_EXPR_ARG (exp, 1);
4760 op1 = expand_normal (arg);
4761
4762 return expand_copysign (op0, op1, target);
4763 }
4764
4765 /* Expand a call to __builtin___clear_cache. */
4766
4767 static rtx
4768 expand_builtin___clear_cache (tree exp)
4769 {
4770 if (!targetm.code_for_clear_cache)
4771 {
4772 #ifdef CLEAR_INSN_CACHE
4773 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4774 does something. Just do the default expansion to a call to
4775 __clear_cache(). */
4776 return NULL_RTX;
4777 #else
4778 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4779 does nothing. There is no need to call it. Do nothing. */
4780 return const0_rtx;
4781 #endif /* CLEAR_INSN_CACHE */
4782 }
4783
4784 /* We have a "clear_cache" insn, and it will handle everything. */
4785 tree begin, end;
4786 rtx begin_rtx, end_rtx;
4787
4788 /* We must not expand to a library call. If we did, any
4789 fallback library function in libgcc that might contain a call to
4790 __builtin___clear_cache() would recurse infinitely. */
4791 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4792 {
4793 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4794 return const0_rtx;
4795 }
4796
4797 if (targetm.have_clear_cache ())
4798 {
4799 struct expand_operand ops[2];
4800
4801 begin = CALL_EXPR_ARG (exp, 0);
4802 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4803
4804 end = CALL_EXPR_ARG (exp, 1);
4805 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4806
4807 create_address_operand (&ops[0], begin_rtx);
4808 create_address_operand (&ops[1], end_rtx);
4809 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
4810 return const0_rtx;
4811 }
4812 return const0_rtx;
4813 }
4814
4815 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4816
4817 static rtx
4818 round_trampoline_addr (rtx tramp)
4819 {
4820 rtx temp, addend, mask;
4821
4822 /* If we don't need too much alignment, we'll have been guaranteed
4823 proper alignment by get_trampoline_type. */
4824 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4825 return tramp;
4826
4827 /* Round address up to desired boundary. */
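/* I.e. compute (TRAMP + ALIGN - 1) & -ALIGN in bytes; with a 32-byte
   boundary, for example, address 0x1001 rounds up to 0x1020. */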
4828 temp = gen_reg_rtx (Pmode);
4829 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4830 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4831
4832 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4833 temp, 0, OPTAB_LIB_WIDEN);
4834 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4835 temp, 0, OPTAB_LIB_WIDEN);
4836
4837 return tramp;
4838 }
4839
4840 static rtx
4841 expand_builtin_init_trampoline (tree exp, bool onstack)
4842 {
4843 tree t_tramp, t_func, t_chain;
4844 rtx m_tramp, r_tramp, r_chain, tmp;
4845
4846 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4847 POINTER_TYPE, VOID_TYPE))
4848 return NULL_RTX;
4849
4850 t_tramp = CALL_EXPR_ARG (exp, 0);
4851 t_func = CALL_EXPR_ARG (exp, 1);
4852 t_chain = CALL_EXPR_ARG (exp, 2);
4853
4854 r_tramp = expand_normal (t_tramp);
4855 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4856 MEM_NOTRAP_P (m_tramp) = 1;
4857
4858 /* If ONSTACK, the TRAMP argument should be the address of a field
4859 within the local function's FRAME decl. Either way, let's see if
4860 we can fill in the MEM_ATTRs for this memory. */
4861 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4862 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
4863
4864 /* Creator of a heap trampoline is responsible for making sure the
4865 address is aligned to at least STACK_BOUNDARY. Normally malloc
4866 will ensure this anyhow. */
4867 tmp = round_trampoline_addr (r_tramp);
4868 if (tmp != r_tramp)
4869 {
4870 m_tramp = change_address (m_tramp, BLKmode, tmp);
4871 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4872 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4873 }
4874
4875 /* The FUNC argument should be the address of the nested function.
4876 Extract the actual function decl to pass to the hook. */
4877 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4878 t_func = TREE_OPERAND (t_func, 0);
4879 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4880
4881 r_chain = expand_normal (t_chain);
4882
4883 /* Generate insns to initialize the trampoline. */
4884 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4885
4886 if (onstack)
4887 {
4888 trampolines_created = 1;
4889
4890 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4891 "trampoline generated for nested function %qD", t_func);
4892 }
4893
4894 return const0_rtx;
4895 }
4896
4897 static rtx
4898 expand_builtin_adjust_trampoline (tree exp)
4899 {
4900 rtx tramp;
4901
4902 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4903 return NULL_RTX;
4904
4905 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4906 tramp = round_trampoline_addr (tramp);
4907 if (targetm.calls.trampoline_adjust_address)
4908 tramp = targetm.calls.trampoline_adjust_address (tramp);
4909
4910 return tramp;
4911 }
4912
4913 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4914 function. The function first checks whether the back end provides
4915 an insn to implement signbit for the respective mode. If not, it
4916 checks whether the floating point format of the value is such that
4917 the sign bit can be extracted. If that is not the case, error out.
4918 EXP is the expression that is a call to the builtin function; if
4919 convenient, the result should be placed in TARGET. */
4920 static rtx
4921 expand_builtin_signbit (tree exp, rtx target)
4922 {
4923 const struct real_format *fmt;
4924 machine_mode fmode, imode, rmode;
4925 tree arg;
4926 int word, bitpos;
4927 enum insn_code icode;
4928 rtx temp;
4929 location_t loc = EXPR_LOCATION (exp);
4930
4931 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4932 return NULL_RTX;
4933
4934 arg = CALL_EXPR_ARG (exp, 0);
4935 fmode = TYPE_MODE (TREE_TYPE (arg));
4936 rmode = TYPE_MODE (TREE_TYPE (exp));
4937 fmt = REAL_MODE_FORMAT (fmode);
4938
4939 arg = builtin_save_expr (arg);
4940
4941 /* Expand the argument yielding a RTX expression. */
4942 temp = expand_normal (arg);
4943
4944 /* Check if the back end provides an insn that handles signbit for the
4945 argument's mode. */
4946 icode = optab_handler (signbit_optab, fmode);
4947 if (icode != CODE_FOR_nothing)
4948 {
4949 rtx_insn *last = get_last_insn ();
4950 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4951 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4952 return target;
4953 delete_insns_since (last);
4954 }
4955
4956 /* For floating point formats without a sign bit, implement signbit
4957 as "ARG < 0.0". */
4958 bitpos = fmt->signbit_ro;
4959 if (bitpos < 0)
4960 {
4961 /* But we can't do this if the format supports signed zero. */
4962 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
4963
4964 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
4965 build_real (TREE_TYPE (arg), dconst0));
4966 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4967 }
4968
4969 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
4970 {
4971 imode = int_mode_for_mode (fmode);
4972 gcc_assert (imode != BLKmode);
4973 temp = gen_lowpart (imode, temp);
4974 }
4975 else
4976 {
4977 imode = word_mode;
4978 /* Handle targets with different FP word orders. */
4979 if (FLOAT_WORDS_BIG_ENDIAN)
4980 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
4981 else
4982 word = bitpos / BITS_PER_WORD;
4983 temp = operand_subword_force (temp, word, fmode);
4984 bitpos = bitpos % BITS_PER_WORD;
4985 }
4986
4987 /* Force the intermediate word_mode (or narrower) result into a
4988 register. This avoids attempting to create paradoxical SUBREGs
4989 of floating point modes below. */
4990 temp = force_reg (imode, temp);
4991
4992 /* If the bitpos is within the "result mode" lowpart, the operation
4993 can be implemented with a single bitwise AND. Otherwise, we need
4994 a right shift and an AND. */
4995
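/* For example (illustrative), for IEEE single precision BITPOS is 31,
   so with a 32-bit result mode this reduces to a single
   TEMP & 0x80000000. */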
4996 if (bitpos < GET_MODE_BITSIZE (rmode))
4997 {
4998 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
4999
5000 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5001 temp = gen_lowpart (rmode, temp);
5002 temp = expand_binop (rmode, and_optab, temp,
5003 immed_wide_int_const (mask, rmode),
5004 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5005 }
5006 else
5007 {
5008 /* Perform a logical right shift to place the signbit in the least
5009 significant bit, then truncate the result to the desired mode
5010 and mask just this bit. */
5011 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5012 temp = gen_lowpart (rmode, temp);
5013 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5014 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5015 }
5016
5017 return temp;
5018 }
5019
5020 /* Expand fork or exec calls. TARGET is the desired target of the
5021 call. EXP is the call. FN is the
5022 identifier of the actual function. IGNORE is nonzero if the
5023 value is to be ignored. */
5024
5025 static rtx
5026 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5027 {
5028 tree id, decl;
5029 tree call;
5030
5031 /* If we are not profiling, just call the function. */
5032 if (!profile_arc_flag)
5033 return NULL_RTX;
5034
5035 /* Otherwise call the wrapper. This should be equivalent for the rest of
5036 the compiler, so the code does not diverge, and the wrapper may run the
5037 code necessary for keeping the profiling sane. */
5038
5039 switch (DECL_FUNCTION_CODE (fn))
5040 {
5041 case BUILT_IN_FORK:
5042 id = get_identifier ("__gcov_fork");
5043 break;
5044
5045 case BUILT_IN_EXECL:
5046 id = get_identifier ("__gcov_execl");
5047 break;
5048
5049 case BUILT_IN_EXECV:
5050 id = get_identifier ("__gcov_execv");
5051 break;
5052
5053 case BUILT_IN_EXECLP:
5054 id = get_identifier ("__gcov_execlp");
5055 break;
5056
5057 case BUILT_IN_EXECLE:
5058 id = get_identifier ("__gcov_execle");
5059 break;
5060
5061 case BUILT_IN_EXECVP:
5062 id = get_identifier ("__gcov_execvp");
5063 break;
5064
5065 case BUILT_IN_EXECVE:
5066 id = get_identifier ("__gcov_execve");
5067 break;
5068
5069 default:
5070 gcc_unreachable ();
5071 }
5072
5073 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5074 FUNCTION_DECL, id, TREE_TYPE (fn));
5075 DECL_EXTERNAL (decl) = 1;
5076 TREE_PUBLIC (decl) = 1;
5077 DECL_ARTIFICIAL (decl) = 1;
5078 TREE_NOTHROW (decl) = 1;
5079 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5080 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5081 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5082 return expand_call (call, target, ignore);
5083 }
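/* Illustration (editorial, based on libgcov): the wrapper typically
   flushes the profile counters before doing the real work, roughly

     pid_t __gcov_fork (void)
     {
       __gcov_flush ();   // dump counters so parent and child do not
       return fork ();    // double-count arcs executed so far
     }

   so a plain fork () compiled with -fprofile-arcs ends up calling
   __gcov_fork instead. */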
5084
5085
5086 \f
5087 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5088 the pointer in these functions is void*, the tree optimizers may remove
5089 casts. The mode computed in expand_builtin isn't reliable either, due
5090 to __sync_bool_compare_and_swap.
5091
5092 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5093 group of builtins. This gives us log2 of the mode size. */
5094
5095 static inline machine_mode
5096 get_builtin_sync_mode (int fcode_diff)
5097 {
5098 /* The size is not negotiable, so ask not to get BLKmode in return
5099 if the target indicates that a smaller size would be better. */
5100 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5101 }
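/* For example, for __sync_fetch_and_add_4 the difference from the _1
   variant is 2, so the size is BITS_PER_UNIT << 2 = 32 bits and this
   returns SImode (on a target with 8-bit units). */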
5102
5103 /* Expand the memory expression LOC and return the appropriate memory operand
5104 for the builtin_sync operations. */
5105
5106 static rtx
5107 get_builtin_sync_mem (tree loc, machine_mode mode)
5108 {
5109 rtx addr, mem;
5110
5111 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5112 addr = convert_memory_address (Pmode, addr);
5113
5114 /* Note that we explicitly do not want any alias information for this
5115 memory, so that we kill all other live memories. Otherwise we don't
5116 satisfy the full barrier semantics of the intrinsic. */
5117 mem = validize_mem (gen_rtx_MEM (mode, addr));
5118
5119 /* The alignment needs to be at least that of the mode. */
5120 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5121 get_pointer_alignment (loc)));
5122 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5123 MEM_VOLATILE_P (mem) = 1;
5124
5125 return mem;
5126 }
5127
5128 /* Make sure an argument is in the right mode.
5129 EXP is the tree argument.
5130 MODE is the mode it should be in. */
5131
5132 static rtx
5133 expand_expr_force_mode (tree exp, machine_mode mode)
5134 {
5135 rtx val;
5136 machine_mode old_mode;
5137
5138 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5139 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5140 of CONST_INTs, where we know the old_mode only from the call argument. */
5141
5142 old_mode = GET_MODE (val);
5143 if (old_mode == VOIDmode)
5144 old_mode = TYPE_MODE (TREE_TYPE (exp));
5145 val = convert_modes (mode, old_mode, val, 1);
5146 return val;
5147 }
5148
5149
5150 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5151 EXP is the CALL_EXPR. CODE is the rtx code
5152 that corresponds to the arithmetic or logical operation from the name;
5153 an exception here is that NOT actually means NAND. TARGET is an optional
5154 place for us to store the results; AFTER is true if this is the
5155 fetch_and_xxx form. */
5156
5157 static rtx
5158 expand_builtin_sync_operation (machine_mode mode, tree exp,
5159 enum rtx_code code, bool after,
5160 rtx target)
5161 {
5162 rtx val, mem;
5163 location_t loc = EXPR_LOCATION (exp);
5164
5165 if (code == NOT && warn_sync_nand)
5166 {
5167 tree fndecl = get_callee_fndecl (exp);
5168 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5169
5170 static bool warned_f_a_n, warned_n_a_f;
5171
5172 switch (fcode)
5173 {
5174 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5175 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5176 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5177 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5178 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5179 if (warned_f_a_n)
5180 break;
5181
5182 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5183 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5184 warned_f_a_n = true;
5185 break;
5186
5187 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5188 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5189 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5190 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5191 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5192 if (warned_n_a_f)
5193 break;
5194
5195 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5196 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5197 warned_n_a_f = true;
5198 break;
5199
5200 default:
5201 gcc_unreachable ();
5202 }
5203 }
5204
5205 /* Expand the operands. */
5206 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5207 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5208
5209 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
5210 after);
5211 }
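/* Illustration of the semantic change warned about above: since GCC 4.4,
     __sync_fetch_and_nand (p, v)  performs  { tmp = *p; *p = ~(tmp & v); }
   whereas releases before 4.4 implemented it as  *p = ~tmp & v.  */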
5212
5213 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5214 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5215 true if this is the boolean form. TARGET is a place for us to store the
5216 results; this is NOT optional if IS_BOOL is true. */
5217
5218 static rtx
5219 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5220 bool is_bool, rtx target)
5221 {
5222 rtx old_val, new_val, mem;
5223 rtx *pbool, *poval;
5224
5225 /* Expand the operands. */
5226 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5227 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5228 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5229
5230 pbool = poval = NULL;
5231 if (target != const0_rtx)
5232 {
5233 if (is_bool)
5234 pbool = &target;
5235 else
5236 poval = &target;
5237 }
5238 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5239 false, MEMMODEL_SYNC_SEQ_CST,
5240 MEMMODEL_SYNC_SEQ_CST))
5241 return NULL_RTX;
5242
5243 return target;
5244 }
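/* Illustration: the two user-visible forms handled here, for an int:

     bool ok  = __sync_bool_compare_and_swap (&x, oldval, newval);
     int  old = __sync_val_compare_and_swap  (&x, oldval, newval);

   Only the boolean form requires TARGET, as noted above. */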
5245
5246 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5247 general form is actually an atomic exchange, and some targets only
5248 support a reduced form with the second argument being a constant 1.
5249 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5250 the results. */
5251
5252 static rtx
5253 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5254 rtx target)
5255 {
5256 rtx val, mem;
5257
5258 /* Expand the operands. */
5259 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5260 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5261
5262 return expand_sync_lock_test_and_set (target, mem, val);
5263 }
5264
5265 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5266
5267 static void
5268 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5269 {
5270 rtx mem;
5271
5272 /* Expand the operands. */
5273 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5274
5275 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
5276 }
5277
5278 /* Given an integer representing an ``enum memmodel'', verify its
5279 correctness and return the memory model enum. */
5280
5281 static enum memmodel
5282 get_memmodel (tree exp)
5283 {
5284 rtx op;
5285 unsigned HOST_WIDE_INT val;
5286
5287 /* If the parameter is not a constant, it's a run time value so we'll just
5288 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5289 if (TREE_CODE (exp) != INTEGER_CST)
5290 return MEMMODEL_SEQ_CST;
5291
5292 op = expand_normal (exp);
5293
5294 val = INTVAL (op);
5295 if (targetm.memmodel_check)
5296 val = targetm.memmodel_check (val);
5297 else if (val & ~MEMMODEL_MASK)
5298 {
5299 warning (OPT_Winvalid_memory_model,
5300 "Unknown architecture specifier in memory model to builtin.");
5301 return MEMMODEL_SEQ_CST;
5302 }
5303
5304 /* Should never see an explicit user SYNC memory model, so >= LAST works. */
5305 if (memmodel_base (val) >= MEMMODEL_LAST)
5306 {
5307 warning (OPT_Winvalid_memory_model,
5308 "invalid memory model argument to builtin");
5309 return MEMMODEL_SEQ_CST;
5310 }
5311
5312 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5313 be conservative and promote consume to acquire. */
5314 if (val == MEMMODEL_CONSUME)
5315 val = MEMMODEL_ACQUIRE;
5316
5317 return (enum memmodel) val;
5318 }
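/* For reference, the constant arguments validated here follow the C11
   enumeration: __ATOMIC_RELAXED (0), __ATOMIC_CONSUME (1),
   __ATOMIC_ACQUIRE (2), __ATOMIC_RELEASE (3), __ATOMIC_ACQ_REL (4),
   __ATOMIC_SEQ_CST (5).  E.g. __atomic_load_n (&x, __ATOMIC_CONSUME)
   is treated as an acquire load because of the PR 59448 workaround. */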
5319
5320 /* Expand the __atomic_exchange intrinsic:
5321 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5322 EXP is the CALL_EXPR.
5323 TARGET is an optional place for us to store the results. */
5324
5325 static rtx
5326 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5327 {
5328 rtx val, mem;
5329 enum memmodel model;
5330
5331 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5332
5333 if (!flag_inline_atomics)
5334 return NULL_RTX;
5335
5336 /* Expand the operands. */
5337 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5338 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5339
5340 return expand_atomic_exchange (target, mem, val, model);
5341 }
5342
5343 /* Expand the __atomic_compare_exchange intrinsic:
5344 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5345 TYPE desired, BOOL weak,
5346 enum memmodel success,
5347 enum memmodel failure)
5348 EXP is the CALL_EXPR.
5349 TARGET is an optional place for us to store the results. */
5350
5351 static rtx
5352 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5353 rtx target)
5354 {
5355 rtx expect, desired, mem, oldval;
5356 rtx_code_label *label;
5357 enum memmodel success, failure;
5358 tree weak;
5359 bool is_weak;
5360
5361 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5362 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5363
5364 if (failure > success)
5365 {
5366 warning (OPT_Winvalid_memory_model,
5367 "failure memory model cannot be stronger than success memory "
5368 "model for %<__atomic_compare_exchange%>");
5369 success = MEMMODEL_SEQ_CST;
5370 }
5371
5372 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5373 {
5374 warning (OPT_Winvalid_memory_model,
5375 "invalid failure memory model for "
5376 "%<__atomic_compare_exchange%>");
5377 failure = MEMMODEL_SEQ_CST;
5378 success = MEMMODEL_SEQ_CST;
5379 }
5380
5381
5382 if (!flag_inline_atomics)
5383 return NULL_RTX;
5384
5385 /* Expand the operands. */
5386 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5387
5388 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5389 expect = convert_memory_address (Pmode, expect);
5390 expect = gen_rtx_MEM (mode, expect);
5391 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5392
5393 weak = CALL_EXPR_ARG (exp, 3);
5394 is_weak = false;
5395 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5396 is_weak = true;
5397
5398 if (target == const0_rtx)
5399 target = NULL;
5400
5401 /* Lest the rtl backend create a race condition with an improper store
5402 to memory, always create a new pseudo for OLDVAL. */
5403 oldval = NULL;
5404
5405 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5406 is_weak, success, failure))
5407 return NULL_RTX;
5408
5409 /* Conditionally store back to EXPECT, lest we create a race condition
5410 with an improper store to memory. */
5411 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5412 the normal case where EXPECT is totally private, i.e. a register. At
5413 which point the store can be unconditional. */
5414 label = gen_label_rtx ();
5415 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
5416 GET_MODE (target), 1, label);
5417 emit_move_insn (expect, oldval);
5418 emit_label (label);
5419
5420 return target;
5421 }
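/* Illustration (user-level sketch): a typical increment loop served by
   this expander.  On failure the builtin stores the current value back
   into EXPECTED, which is why the conditional store above is emitted:

     int expected = __atomic_load_n (p, __ATOMIC_RELAXED);
     while (!__atomic_compare_exchange_n (p, &expected, expected + 1,
                                          0, __ATOMIC_SEQ_CST,
                                          __ATOMIC_SEQ_CST))
       ;
*/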
5422
5423 /* Expand the __atomic_load intrinsic:
5424 TYPE __atomic_load (TYPE *object, enum memmodel)
5425 EXP is the CALL_EXPR.
5426 TARGET is an optional place for us to store the results. */
5427
5428 static rtx
5429 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
5430 {
5431 rtx mem;
5432 enum memmodel model;
5433
5434 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5435 if (is_mm_release (model) || is_mm_acq_rel (model))
5436 {
5437 warning (OPT_Winvalid_memory_model,
5438 "invalid memory model for %<__atomic_load%>");
5439 model = MEMMODEL_SEQ_CST;
5440 }
5441
5442 if (!flag_inline_atomics)
5443 return NULL_RTX;
5444
5445 /* Expand the operand. */
5446 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5447
5448 return expand_atomic_load (target, mem, model);
5449 }
5450
5451
5452 /* Expand the __atomic_store intrinsic:
5453 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5454 EXP is the CALL_EXPR.
5455 TARGET is an optional place for us to store the results. */
5456
5457 static rtx
5458 expand_builtin_atomic_store (machine_mode mode, tree exp)
5459 {
5460 rtx mem, val;
5461 enum memmodel model;
5462
5463 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5464 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
5465 || is_mm_release (model)))
5466 {
5467 warning (OPT_Winvalid_memory_model,
5468 "invalid memory model for %<__atomic_store%>");
5469 model = MEMMODEL_SEQ_CST;
5470 }
5471
5472 if (!flag_inline_atomics)
5473 return NULL_RTX;
5474
5475 /* Expand the operands. */
5476 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5477 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5478
5479 return expand_atomic_store (mem, val, model, false);
5480 }
5481
5482 /* Expand the __atomic_fetch_XXX intrinsic:
5483 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5484 EXP is the CALL_EXPR.
5485 TARGET is an optional place for us to store the results.
5486 CODE is the operation: PLUS, MINUS, AND, XOR, IOR, or NOT (meaning NAND).
5487 FETCH_AFTER is true if returning the result of the operation.
5488 FETCH_AFTER is false if returning the value before the operation.
5489 IGNORE is true if the result is not used.
5490 EXT_CALL is the correct builtin for an external call if this cannot be
5491 resolved to an instruction sequence. */
5492
5493 static rtx
5494 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
5495 enum rtx_code code, bool fetch_after,
5496 bool ignore, enum built_in_function ext_call)
5497 {
5498 rtx val, mem, ret;
5499 enum memmodel model;
5500 tree fndecl;
5501 tree addr;
5502
5503 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5504
5505 /* Expand the operands. */
5506 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5507 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5508
5509 /* Only try generating instructions if inlining is turned on. */
5510 if (flag_inline_atomics)
5511 {
5512 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5513 if (ret)
5514 return ret;
5515 }
5516
5517 /* Return if a different routine isn't needed for the library call. */
5518 if (ext_call == BUILT_IN_NONE)
5519 return NULL_RTX;
5520
5521 /* Change the call to the specified function. */
5522 fndecl = get_callee_fndecl (exp);
5523 addr = CALL_EXPR_FN (exp);
5524 STRIP_NOPS (addr);
5525
5526 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5527 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
5528
5529 /* Expand the call here so we can emit trailing code. */
5530 ret = expand_call (exp, target, ignore);
5531
5532 /* Replace the original function just in case it matters. */
5533 TREE_OPERAND (addr, 0) = fndecl;
5534
5535 /* Then issue the arithmetic correction to return the right result. */
5536 if (!ignore)
5537 {
5538 if (code == NOT)
5539 {
5540 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5541 OPTAB_LIB_WIDEN);
5542 ret = expand_simple_unop (mode, NOT, ret, target, true);
5543 }
5544 else
5545 ret = expand_simple_binop (mode, code, ret, val, target, true,
5546 OPTAB_LIB_WIDEN);
5547 }
5548 return ret;
5549 }
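/* Illustration of the trailing correction: if __atomic_add_fetch_4 ends
   up as a library call to __atomic_fetch_add_4, the returned old value
   is fixed up as  ret = old + val;  for NAND the fix-up computes
   ret = ~(old & val), matching the AND + NOT sequence above. */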
5550
5551 /* Expand an atomic clear operation.
5552 void _atomic_clear (BOOL *obj, enum memmodel)
5553 EXP is the call expression. */
5554
5555 static rtx
5556 expand_builtin_atomic_clear (tree exp)
5557 {
5558 machine_mode mode;
5559 rtx mem, ret;
5560 enum memmodel model;
5561
5562 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5563 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5564 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5565
5566 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
5567 {
5568 warning (OPT_Winvalid_memory_model,
5569 "invalid memory model for %<__atomic_store%>");
5570 model = MEMMODEL_SEQ_CST;
5571 }
5572
5573 /* Try issuing an __atomic_store, allowing a fallback to a
5574 __sync_lock_release instruction. The only way this can fail is if
5575 the bool type is larger than a word size. Unlikely, but handle it
5576 anyway for completeness. Assume a single-threaded model since
5577 there is no atomic support in this case, and no barriers are required. */
5578 ret = expand_atomic_store (mem, const0_rtx, model, true);
5579 if (!ret)
5580 emit_move_insn (mem, const0_rtx);
5581 return const0_rtx;
5582 }
5583
5584 /* Expand an atomic test_and_set operation.
5585 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
5586 EXP is the call expression. */
5587
5588 static rtx
5589 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5590 {
5591 rtx mem;
5592 enum memmodel model;
5593 machine_mode mode;
5594
5595 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5596 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5597 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5598
5599 return expand_atomic_test_and_set (target, mem, model);
5600 }
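/* Illustration: together with __atomic_clear above, this is enough for
   a minimal spinlock:

     static char flag;                            // "bool" object
     while (__atomic_test_and_set (&flag, __ATOMIC_ACQUIRE))
       ;                                          // spin until we own it
     ... critical section ...
     __atomic_clear (&flag, __ATOMIC_RELEASE);
*/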
5601
5602
5603 /* Return true if an object of size ARG0 at (optional) address ARG1 is always
5604 lock free on this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5605
5606 static tree
5607 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5608 {
5609 int size;
5610 machine_mode mode;
5611 unsigned int mode_align, type_align;
5612
5613 if (TREE_CODE (arg0) != INTEGER_CST)
5614 return NULL_TREE;
5615
5616 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5617 mode = mode_for_size (size, MODE_INT, 0);
5618 mode_align = GET_MODE_ALIGNMENT (mode);
5619
5620 if (TREE_CODE (arg1) == INTEGER_CST)
5621 {
5622 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
5623
5624 /* Either this argument is null, or it's a fake pointer encoding
5625 the alignment of the object. */
5626 val = val & -val;
5627 val *= BITS_PER_UNIT;
5628
5629 if (val == 0 || mode_align < val)
5630 type_align = mode_align;
5631 else
5632 type_align = val;
5633 }
5634 else
5635 {
5636 tree ttype = TREE_TYPE (arg1);
5637
5638 /* This function is usually invoked and folded immediately by the front
5639 end before anything else has a chance to look at it. The pointer
5640 parameter at this point is usually cast to a void *, so check for that
5641 and look past the cast. */
5642 if (CONVERT_EXPR_P (arg1) && POINTER_TYPE_P (ttype)
5643 && VOID_TYPE_P (TREE_TYPE (ttype)))
5644 arg1 = TREE_OPERAND (arg1, 0);
5645
5646 ttype = TREE_TYPE (arg1);
5647 gcc_assert (POINTER_TYPE_P (ttype));
5648
5649 /* Get the underlying type of the object. */
5650 ttype = TREE_TYPE (ttype);
5651 type_align = TYPE_ALIGN (ttype);
5652 }
5653
5654 /* If the object has smaller alignment, the lock free routines cannot
5655 be used. */
5656 if (type_align < mode_align)
5657 return boolean_false_node;
5658
5659 /* Check if a compare_and_swap pattern exists for the mode which represents
5660 the required size. The pattern is not allowed to fail, so the existence
5661 of the pattern indicates support is present. */
5662 if (can_compare_and_swap_p (mode, true))
5663 return boolean_true_node;
5664 else
5665 return boolean_false_node;
5666 }
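/* Illustration of the fake-pointer encoding:  val & -val  isolates the
   lowest set bit, so a call such as
     __atomic_always_lock_free (8, (void *) 8)
   asks about an 8-byte object with (at least) 8-byte alignment, and
   (void *) 12 would likewise encode 4-byte alignment. */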
5667
5668 /* Return true if the parameters to call EXP represent an object which will
5669 always generate lock free instructions. The first argument represents the
5670 size of the object, and the second parameter is a pointer to the object
5671 itself. If NULL is passed for the object, then the result is based on
5672 typical alignment for an object of the specified size. Otherwise return
5673 false. */
5674
5675 static rtx
5676 expand_builtin_atomic_always_lock_free (tree exp)
5677 {
5678 tree size;
5679 tree arg0 = CALL_EXPR_ARG (exp, 0);
5680 tree arg1 = CALL_EXPR_ARG (exp, 1);
5681
5682 if (TREE_CODE (arg0) != INTEGER_CST)
5683 {
5684 error ("non-constant argument 1 to __atomic_always_lock_free");
5685 return const0_rtx;
5686 }
5687
5688 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5689 if (size == boolean_true_node)
5690 return const1_rtx;
5691 return const0_rtx;
5692 }
5693
5694 /* Return boolean_true_node if it can be determined that the object ARG1 of
5695 size ARG0 is lock free on this architecture, otherwise NULL_TREE. */
5696
5697 static tree
5698 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5699 {
5700 if (!flag_inline_atomics)
5701 return NULL_TREE;
5702
5703 /* If it isn't always lock free, don't generate a result. */
5704 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5705 return boolean_true_node;
5706
5707 return NULL_TREE;
5708 }
5709
5710 /* Return const1_rtx if the parameters to call EXP represent an object which
5711 is known to be lock free on this architecture. The first argument is
5712 the size of the object, and the second parameter is a pointer to the
5713 object itself. If NULL is passed for the object, then the result is
5714 based on typical alignment for an object of the specified size.
5715 Otherwise return NULL_RTX. */
5716
5717 static rtx
5718 expand_builtin_atomic_is_lock_free (tree exp)
5719 {
5720 tree size;
5721 tree arg0 = CALL_EXPR_ARG (exp, 0);
5722 tree arg1 = CALL_EXPR_ARG (exp, 1);
5723
5724 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5725 {
5726 error ("non-integer argument 1 to __atomic_is_lock_free");
5727 return NULL_RTX;
5728 }
5729
5730 if (!flag_inline_atomics)
5731 return NULL_RTX;
5732
5733 /* If the value is known at compile time, return the RTX for it. */
5734 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5735 if (size == boolean_true_node)
5736 return const1_rtx;
5737
5738 return NULL_RTX;
5739 }
5740
5741 /* Expand the __atomic_thread_fence intrinsic:
5742 void __atomic_thread_fence (enum memmodel)
5743 EXP is the CALL_EXPR. */
5744
5745 static void
5746 expand_builtin_atomic_thread_fence (tree exp)
5747 {
5748 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5749 expand_mem_thread_fence (model);
5750 }
5751
5752 /* Expand the __atomic_signal_fence intrinsic:
5753 void __atomic_signal_fence (enum memmodel)
5754 EXP is the CALL_EXPR. */
5755
5756 static void
5757 expand_builtin_atomic_signal_fence (tree exp)
5758 {
5759 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5760 expand_mem_signal_fence (model);
5761 }
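/* Illustration: __atomic_thread_fence orders memory with respect to
   other threads, while __atomic_signal_fence only restricts compiler
   reordering (for a signal handler on the same thread); e.g.
     __atomic_signal_fence (__ATOMIC_SEQ_CST);
   typically emits no machine barrier instruction at all. */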
5762
5763 /* Expand the __sync_synchronize intrinsic. */
5764
5765 static void
5766 expand_builtin_sync_synchronize (void)
5767 {
5768 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
5769 }
5770
5771 static rtx
5772 expand_builtin_thread_pointer (tree exp, rtx target)
5773 {
5774 enum insn_code icode;
5775 if (!validate_arglist (exp, VOID_TYPE))
5776 return const0_rtx;
5777 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5778 if (icode != CODE_FOR_nothing)
5779 {
5780 struct expand_operand op;
5781 /* If the target is not suitable then create a new target. */
5782 if (target == NULL_RTX
5783 || !REG_P (target)
5784 || GET_MODE (target) != Pmode)
5785 target = gen_reg_rtx (Pmode);
5786 create_output_operand (&op, target, Pmode);
5787 expand_insn (icode, 1, &op);
5788 return target;
5789 }
5790 error ("__builtin_thread_pointer is not supported on this target");
5791 return const0_rtx;
5792 }
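/* Illustration: user code such as
     void *tp = __builtin_thread_pointer ();
   expands via the optab to the target's TLS register read (e.g. a move
   from %fs/%gs on x86 or TPIDR_EL0 on AArch64) when one is defined. */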
5793
5794 static void
5795 expand_builtin_set_thread_pointer (tree exp)
5796 {
5797 enum insn_code icode;
5798 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5799 return;
5800 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5801 if (icode != CODE_FOR_nothing)
5802 {
5803 struct expand_operand op;
5804 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5805 Pmode, EXPAND_NORMAL);
5806 create_input_operand (&op, val, Pmode);
5807 expand_insn (icode, 1, &op);
5808 return;
5809 }
5810 error ("__builtin_set_thread_pointer is not supported on this target");
5811 }
5812
5813 \f
5814 /* Emit code to restore the stack pointer from the value saved in VAR. */
5815
5816 static void
5817 expand_stack_restore (tree var)
5818 {
5819 rtx_insn *prev;
5820 rtx sa = expand_normal (var);
5821
5822 sa = convert_memory_address (Pmode, sa);
5823
5824 prev = get_last_insn ();
5825 emit_stack_restore (SAVE_BLOCK, sa);
5826
5827 record_new_stack_level ();
5828
5829 fixup_args_size_notes (prev, get_last_insn (), 0);
5830 }
5831
5832 /* Emit code to save the current value of the stack pointer. */
5833
5834 static rtx
5835 expand_stack_save (void)
5836 {
5837 rtx ret = NULL_RTX;
5838
5839 emit_stack_save (SAVE_BLOCK, &ret);
5840 return ret;
5841 }
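/* Illustration: the compiler pairs these around variable-length array
   scopes, conceptually

     void *sp = stack_save ();     // BUILT_IN_STACK_SAVE
     {  char vla[n];  ...  }       // VLA storage lives past the save point
     stack_restore (sp);           // BUILT_IN_STACK_RESTORE

   (stack_save/stack_restore here are pseudocode for the internal
   builtins, which are not directly callable from user code). */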
5842
5843
5844 /* Expand an expression EXP that calls a built-in function,
5845 with result going to TARGET if that's convenient
5846 (and in mode MODE if that's convenient).
5847 SUBTARGET may be used as the target for computing one of EXP's operands.
5848 IGNORE is nonzero if the value is to be ignored. */
5849
5850 rtx
5851 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
5852 int ignore)
5853 {
5854 tree fndecl = get_callee_fndecl (exp);
5855 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5856 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5857 int flags;
5858
5859 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5860 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5861
5862 /* When ASan is enabled, we don't want to expand some memory/string
5863 builtins; instead we rely on libsanitizer's hooks. This lets us avoid
5864 redundant checks and be sure that possible overflows will be detected
5865 by ASan. */
5866
5867 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
5868 return expand_call (exp, target, ignore);
5869
5870 /* When not optimizing, generate calls to library functions for a certain
5871 set of builtins. */
5872 if (!optimize
5873 && !called_as_built_in (fndecl)
5874 && fcode != BUILT_IN_FORK
5875 && fcode != BUILT_IN_EXECL
5876 && fcode != BUILT_IN_EXECV
5877 && fcode != BUILT_IN_EXECLP
5878 && fcode != BUILT_IN_EXECLE
5879 && fcode != BUILT_IN_EXECVP
5880 && fcode != BUILT_IN_EXECVE
5881 && fcode != BUILT_IN_ALLOCA
5882 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5883 && fcode != BUILT_IN_FREE
5884 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
5885 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
5886 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
5887 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
5888 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
5889 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
5890 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
5891 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
5892 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
5893 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
5894 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
5895 && fcode != BUILT_IN_CHKP_BNDRET)
5896 return expand_call (exp, target, ignore);
5897
5898 /* The built-in function expanders test for target == const0_rtx
5899 to determine whether the function's result will be ignored. */
5900 if (ignore)
5901 target = const0_rtx;
5902
5903 /* If the result of a pure or const built-in function is ignored, and
5904 none of its arguments are volatile, we can avoid expanding the
5905 built-in call and just evaluate the arguments for side-effects. */
5906 if (target == const0_rtx
5907 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5908 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5909 {
5910 bool volatilep = false;
5911 tree arg;
5912 call_expr_arg_iterator iter;
5913
5914 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5915 if (TREE_THIS_VOLATILE (arg))
5916 {
5917 volatilep = true;
5918 break;
5919 }
5920
5921 if (! volatilep)
5922 {
5923 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5924 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5925 return const0_rtx;
5926 }
5927 }
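/* For example, a call such as  (void) __builtin_ffs (i++);  reaches
   here with TARGET == const0_rtx; ffs is const, so only the argument
   i++ is expanded for its side effect and no ffs code is emitted. */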
5928
5929 /* expand_builtin_with_bounds is supposed to be used for
5930 instrumented builtin calls. */
5931 gcc_assert (!CALL_WITH_BOUNDS_P (exp));
5932
5933 switch (fcode)
5934 {
5935 CASE_FLT_FN (BUILT_IN_FABS):
5936 case BUILT_IN_FABSD32:
5937 case BUILT_IN_FABSD64:
5938 case BUILT_IN_FABSD128:
5939 target = expand_builtin_fabs (exp, target, subtarget);
5940 if (target)
5941 return target;
5942 break;
5943
5944 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5945 target = expand_builtin_copysign (exp, target, subtarget);
5946 if (target)
5947 return target;
5948 break;
5949
5950 /* Just do a normal library call if we were unable to fold
5951 the values. */
5952 CASE_FLT_FN (BUILT_IN_CABS):
5953 break;
5954
5955 CASE_FLT_FN (BUILT_IN_EXP):
5956 CASE_FLT_FN (BUILT_IN_EXP10):
5957 CASE_FLT_FN (BUILT_IN_POW10):
5958 CASE_FLT_FN (BUILT_IN_EXP2):
5959 CASE_FLT_FN (BUILT_IN_EXPM1):
5960 CASE_FLT_FN (BUILT_IN_LOGB):
5961 CASE_FLT_FN (BUILT_IN_LOG):
5962 CASE_FLT_FN (BUILT_IN_LOG10):
5963 CASE_FLT_FN (BUILT_IN_LOG2):
5964 CASE_FLT_FN (BUILT_IN_LOG1P):
5965 CASE_FLT_FN (BUILT_IN_TAN):
5966 CASE_FLT_FN (BUILT_IN_ASIN):
5967 CASE_FLT_FN (BUILT_IN_ACOS):
5968 CASE_FLT_FN (BUILT_IN_ATAN):
5969 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5970 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5971 because of possible accuracy problems. */
5972 if (! flag_unsafe_math_optimizations)
5973 break;
5974 CASE_FLT_FN (BUILT_IN_SQRT):
5975 CASE_FLT_FN (BUILT_IN_FLOOR):
5976 CASE_FLT_FN (BUILT_IN_CEIL):
5977 CASE_FLT_FN (BUILT_IN_TRUNC):
5978 CASE_FLT_FN (BUILT_IN_ROUND):
5979 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5980 CASE_FLT_FN (BUILT_IN_RINT):
5981 target = expand_builtin_mathfn (exp, target, subtarget);
5982 if (target)
5983 return target;
5984 break;
5985
5986 CASE_FLT_FN (BUILT_IN_FMA):
5987 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
5988 if (target)
5989 return target;
5990 break;
5991
5992 CASE_FLT_FN (BUILT_IN_ILOGB):
5993 if (! flag_unsafe_math_optimizations)
5994 break;
5995 CASE_FLT_FN (BUILT_IN_ISINF):
5996 CASE_FLT_FN (BUILT_IN_FINITE):
5997 case BUILT_IN_ISFINITE:
5998 case BUILT_IN_ISNORMAL:
5999 target = expand_builtin_interclass_mathfn (exp, target);
6000 if (target)
6001 return target;
6002 break;
6003
6004 CASE_FLT_FN (BUILT_IN_ICEIL):
6005 CASE_FLT_FN (BUILT_IN_LCEIL):
6006 CASE_FLT_FN (BUILT_IN_LLCEIL):
6007 CASE_FLT_FN (BUILT_IN_LFLOOR):
6008 CASE_FLT_FN (BUILT_IN_IFLOOR):
6009 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6010 target = expand_builtin_int_roundingfn (exp, target);
6011 if (target)
6012 return target;
6013 break;
6014
6015 CASE_FLT_FN (BUILT_IN_IRINT):
6016 CASE_FLT_FN (BUILT_IN_LRINT):
6017 CASE_FLT_FN (BUILT_IN_LLRINT):
6018 CASE_FLT_FN (BUILT_IN_IROUND):
6019 CASE_FLT_FN (BUILT_IN_LROUND):
6020 CASE_FLT_FN (BUILT_IN_LLROUND):
6021 target = expand_builtin_int_roundingfn_2 (exp, target);
6022 if (target)
6023 return target;
6024 break;
6025
6026 CASE_FLT_FN (BUILT_IN_POWI):
6027 target = expand_builtin_powi (exp, target);
6028 if (target)
6029 return target;
6030 break;
6031
6032 CASE_FLT_FN (BUILT_IN_ATAN2):
6033 CASE_FLT_FN (BUILT_IN_LDEXP):
6034 CASE_FLT_FN (BUILT_IN_SCALB):
6035 CASE_FLT_FN (BUILT_IN_SCALBN):
6036 CASE_FLT_FN (BUILT_IN_SCALBLN):
6037 if (! flag_unsafe_math_optimizations)
6038 break;
6039
6040 CASE_FLT_FN (BUILT_IN_FMOD):
6041 CASE_FLT_FN (BUILT_IN_REMAINDER):
6042 CASE_FLT_FN (BUILT_IN_DREM):
6043 CASE_FLT_FN (BUILT_IN_POW):
6044 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6045 if (target)
6046 return target;
6047 break;
6048
6049 CASE_FLT_FN (BUILT_IN_CEXPI):
6050 target = expand_builtin_cexpi (exp, target);
6051 gcc_assert (target);
6052 return target;
6053
6054 CASE_FLT_FN (BUILT_IN_SIN):
6055 CASE_FLT_FN (BUILT_IN_COS):
6056 if (! flag_unsafe_math_optimizations)
6057 break;
6058 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6059 if (target)
6060 return target;
6061 break;
6062
6063 CASE_FLT_FN (BUILT_IN_SINCOS):
6064 if (! flag_unsafe_math_optimizations)
6065 break;
6066 target = expand_builtin_sincos (exp);
6067 if (target)
6068 return target;
6069 break;
6070
6071 case BUILT_IN_APPLY_ARGS:
6072 return expand_builtin_apply_args ();
6073
6074 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6075 FUNCTION with a copy of the parameters described by
6076 ARGUMENTS, and ARGSIZE. It returns a block of memory
6077 allocated on the stack into which is stored all the registers
6078 that might possibly be used for returning the result of a
6079 function. ARGUMENTS is the value returned by
6080 __builtin_apply_args. ARGSIZE is the number of bytes of
6081 arguments that must be copied. ??? How should this value be
6082 computed? We'll also need a safe worst case value for varargs
6083 functions. */
6084 case BUILT_IN_APPLY:
6085 if (!validate_arglist (exp, POINTER_TYPE,
6086 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6087 && !validate_arglist (exp, REFERENCE_TYPE,
6088 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6089 return const0_rtx;
6090 else
6091 {
6092 rtx ops[3];
6093
6094 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6095 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6096 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6097
6098 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6099 }
6100
6101 /* __builtin_return (RESULT) causes the function to return the
6102 value described by RESULT. RESULT is address of the block of
6103 memory returned by __builtin_apply. */
6104 case BUILT_IN_RETURN:
6105 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6106 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6107 return const0_rtx;
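/* Illustration: the documented use of this trio is a generic forwarding
   wrapper, e.g.

     void *args = __builtin_apply_args ();
     void *ret  = __builtin_apply ((void (*)()) real_fn, args, 64);
     __builtin_return (ret);

   where real_fn and the worst-case argument size 64 are placeholders. */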
6108
6109 case BUILT_IN_SAVEREGS:
6110 return expand_builtin_saveregs ();
6111
6112 case BUILT_IN_VA_ARG_PACK:
6113 /* All valid uses of __builtin_va_arg_pack () are removed during
6114 inlining. */
6115 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6116 return const0_rtx;
6117
6118 case BUILT_IN_VA_ARG_PACK_LEN:
6119 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6120 inlining. */
6121 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6122 return const0_rtx;
6123
6124 /* Return the address of the first anonymous stack arg. */
6125 case BUILT_IN_NEXT_ARG:
6126 if (fold_builtin_next_arg (exp, false))
6127 return const0_rtx;
6128 return expand_builtin_next_arg ();
6129
6130 case BUILT_IN_CLEAR_CACHE:
6131 target = expand_builtin___clear_cache (exp);
6132 if (target)
6133 return target;
6134 break;
6135
6136 case BUILT_IN_CLASSIFY_TYPE:
6137 return expand_builtin_classify_type (exp);
6138
6139 case BUILT_IN_CONSTANT_P:
6140 return const0_rtx;
6141
6142 case BUILT_IN_FRAME_ADDRESS:
6143 case BUILT_IN_RETURN_ADDRESS:
6144 return expand_builtin_frame_address (fndecl, exp);
6145
6146 /* Returns the address of the area where the structure is returned.
6147 0 otherwise. */
6148 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6149 if (call_expr_nargs (exp) != 0
6150 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6151 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6152 return const0_rtx;
6153 else
6154 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6155
6156 case BUILT_IN_ALLOCA:
6157 case BUILT_IN_ALLOCA_WITH_ALIGN:
6158 /* If the allocation stems from the declaration of a variable-sized
6159 object, it cannot accumulate. */
6160 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6161 if (target)
6162 return target;
6163 break;
6164
6165 case BUILT_IN_STACK_SAVE:
6166 return expand_stack_save ();
6167
6168 case BUILT_IN_STACK_RESTORE:
6169 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6170 return const0_rtx;
6171
6172 case BUILT_IN_BSWAP16:
6173 case BUILT_IN_BSWAP32:
6174 case BUILT_IN_BSWAP64:
6175 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6176 if (target)
6177 return target;
6178 break;
6179
6180 CASE_INT_FN (BUILT_IN_FFS):
6181 target = expand_builtin_unop (target_mode, exp, target,
6182 subtarget, ffs_optab);
6183 if (target)
6184 return target;
6185 break;
6186
6187 CASE_INT_FN (BUILT_IN_CLZ):
6188 target = expand_builtin_unop (target_mode, exp, target,
6189 subtarget, clz_optab);
6190 if (target)
6191 return target;
6192 break;
6193
6194 CASE_INT_FN (BUILT_IN_CTZ):
6195 target = expand_builtin_unop (target_mode, exp, target,
6196 subtarget, ctz_optab);
6197 if (target)
6198 return target;
6199 break;
6200
6201 CASE_INT_FN (BUILT_IN_CLRSB):
6202 target = expand_builtin_unop (target_mode, exp, target,
6203 subtarget, clrsb_optab);
6204 if (target)
6205 return target;
6206 break;
6207
6208 CASE_INT_FN (BUILT_IN_POPCOUNT):
6209 target = expand_builtin_unop (target_mode, exp, target,
6210 subtarget, popcount_optab);
6211 if (target)
6212 return target;
6213 break;
6214
6215 CASE_INT_FN (BUILT_IN_PARITY):
6216 target = expand_builtin_unop (target_mode, exp, target,
6217 subtarget, parity_optab);
6218 if (target)
6219 return target;
6220 break;
6221
6222 case BUILT_IN_STRLEN:
6223 target = expand_builtin_strlen (exp, target, target_mode);
6224 if (target)
6225 return target;
6226 break;
6227
6228 case BUILT_IN_STRCPY:
6229 target = expand_builtin_strcpy (exp, target);
6230 if (target)
6231 return target;
6232 break;
6233
6234 case BUILT_IN_STRNCPY:
6235 target = expand_builtin_strncpy (exp, target);
6236 if (target)
6237 return target;
6238 break;
6239
6240 case BUILT_IN_STPCPY:
6241 target = expand_builtin_stpcpy (exp, target, mode);
6242 if (target)
6243 return target;
6244 break;
6245
6246 case BUILT_IN_MEMCPY:
6247 target = expand_builtin_memcpy (exp, target);
6248 if (target)
6249 return target;
6250 break;
6251
6252 case BUILT_IN_MEMPCPY:
6253 target = expand_builtin_mempcpy (exp, target, mode);
6254 if (target)
6255 return target;
6256 break;
6257
6258 case BUILT_IN_MEMSET:
6259 target = expand_builtin_memset (exp, target, mode);
6260 if (target)
6261 return target;
6262 break;
6263
6264 case BUILT_IN_BZERO:
6265 target = expand_builtin_bzero (exp);
6266 if (target)
6267 return target;
6268 break;
6269
6270 case BUILT_IN_STRCMP:
6271 target = expand_builtin_strcmp (exp, target);
6272 if (target)
6273 return target;
6274 break;
6275
6276 case BUILT_IN_STRNCMP:
6277 target = expand_builtin_strncmp (exp, target, mode);
6278 if (target)
6279 return target;
6280 break;
6281
6282 case BUILT_IN_BCMP:
6283 case BUILT_IN_MEMCMP:
6284 target = expand_builtin_memcmp (exp, target);
6285 if (target)
6286 return target;
6287 break;
6288
6289 case BUILT_IN_SETJMP:
6290 /* This should have been lowered to the builtins below. */
6291 gcc_unreachable ();
6292
6293 case BUILT_IN_SETJMP_SETUP:
6294 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6295 and the receiver label. */
6296 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6297 {
6298 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6299 VOIDmode, EXPAND_NORMAL);
6300 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6301 rtx_insn *label_r = label_rtx (label);
6302
6303 /* This is copied from the handling of non-local gotos. */
6304 expand_builtin_setjmp_setup (buf_addr, label_r);
6305 nonlocal_goto_handler_labels
6306 = gen_rtx_INSN_LIST (VOIDmode, label_r,
6307 nonlocal_goto_handler_labels);
6308 /* ??? Do not let expand_label treat us as such since we would
6309 not want to be both on the list of non-local labels and on
6310 the list of forced labels. */
6311 FORCED_LABEL (label) = 0;
6312 return const0_rtx;
6313 }
6314 break;
6315
6316 case BUILT_IN_SETJMP_RECEIVER:
6317 /* __builtin_setjmp_receiver is passed the receiver label. */
6318 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6319 {
6320 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6321 rtx_insn *label_r = label_rtx (label);
6322
6323 expand_builtin_setjmp_receiver (label_r);
6324 return const0_rtx;
6325 }
6326 break;
6327
6328 /* __builtin_longjmp is passed a pointer to an array of five words.
6329 It's similar to the C library longjmp function but works with
6330 __builtin_setjmp above. */
6331 case BUILT_IN_LONGJMP:
6332 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6333 {
6334 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6335 VOIDmode, EXPAND_NORMAL);
6336 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6337
6338 if (value != const1_rtx)
6339 {
6340 error ("%<__builtin_longjmp%> second argument must be 1");
6341 return const0_rtx;
6342 }
6343
6344 expand_builtin_longjmp (buf_addr, value);
6345 return const0_rtx;
6346 }
6347 break;
6348
6349 case BUILT_IN_NONLOCAL_GOTO:
6350 target = expand_builtin_nonlocal_goto (exp);
6351 if (target)
6352 return target;
6353 break;
6354
6355 /* This updates the setjmp buffer that is its argument with the value
6356 of the current stack pointer. */
6357 case BUILT_IN_UPDATE_SETJMP_BUF:
6358 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6359 {
6360 rtx buf_addr
6361 = expand_normal (CALL_EXPR_ARG (exp, 0));
6362
6363 expand_builtin_update_setjmp_buf (buf_addr);
6364 return const0_rtx;
6365 }
6366 break;
6367
6368 case BUILT_IN_TRAP:
6369 expand_builtin_trap ();
6370 return const0_rtx;
6371
6372 case BUILT_IN_UNREACHABLE:
6373 expand_builtin_unreachable ();
6374 return const0_rtx;
6375
6376 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6377 case BUILT_IN_SIGNBITD32:
6378 case BUILT_IN_SIGNBITD64:
6379 case BUILT_IN_SIGNBITD128:
6380 target = expand_builtin_signbit (exp, target);
6381 if (target)
6382 return target;
6383 break;
6384
6385 /* Various hooks for the DWARF 2 __throw routine. */
6386 case BUILT_IN_UNWIND_INIT:
6387 expand_builtin_unwind_init ();
6388 return const0_rtx;
6389 case BUILT_IN_DWARF_CFA:
6390 return virtual_cfa_rtx;
6391 #ifdef DWARF2_UNWIND_INFO
6392 case BUILT_IN_DWARF_SP_COLUMN:
6393 return expand_builtin_dwarf_sp_column ();
6394 case BUILT_IN_INIT_DWARF_REG_SIZES:
6395 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6396 return const0_rtx;
6397 #endif
6398 case BUILT_IN_FROB_RETURN_ADDR:
6399 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6400 case BUILT_IN_EXTRACT_RETURN_ADDR:
6401 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6402 case BUILT_IN_EH_RETURN:
6403 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6404 CALL_EXPR_ARG (exp, 1));
6405 return const0_rtx;
6406 case BUILT_IN_EH_RETURN_DATA_REGNO:
6407 return expand_builtin_eh_return_data_regno (exp);
6408 case BUILT_IN_EXTEND_POINTER:
6409 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6410 case BUILT_IN_EH_POINTER:
6411 return expand_builtin_eh_pointer (exp);
6412 case BUILT_IN_EH_FILTER:
6413 return expand_builtin_eh_filter (exp);
6414 case BUILT_IN_EH_COPY_VALUES:
6415 return expand_builtin_eh_copy_values (exp);
6416
6417 case BUILT_IN_VA_START:
6418 return expand_builtin_va_start (exp);
6419 case BUILT_IN_VA_END:
6420 return expand_builtin_va_end (exp);
6421 case BUILT_IN_VA_COPY:
6422 return expand_builtin_va_copy (exp);
6423 case BUILT_IN_EXPECT:
6424 return expand_builtin_expect (exp, target);
6425 case BUILT_IN_ASSUME_ALIGNED:
6426 return expand_builtin_assume_aligned (exp, target);
6427 case BUILT_IN_PREFETCH:
6428 expand_builtin_prefetch (exp);
6429 return const0_rtx;
6430
6431 case BUILT_IN_INIT_TRAMPOLINE:
6432 return expand_builtin_init_trampoline (exp, true);
6433 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6434 return expand_builtin_init_trampoline (exp, false);
6435 case BUILT_IN_ADJUST_TRAMPOLINE:
6436 return expand_builtin_adjust_trampoline (exp);
6437
6438 case BUILT_IN_FORK:
6439 case BUILT_IN_EXECL:
6440 case BUILT_IN_EXECV:
6441 case BUILT_IN_EXECLP:
6442 case BUILT_IN_EXECLE:
6443 case BUILT_IN_EXECVP:
6444 case BUILT_IN_EXECVE:
6445 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6446 if (target)
6447 return target;
6448 break;
6449
6450 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6451 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6452 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6453 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6454 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6455 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6456 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6457 if (target)
6458 return target;
6459 break;
6460
6461 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6462 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6463 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6464 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6465 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6466 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6467 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6468 if (target)
6469 return target;
6470 break;
6471
6472 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6473 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6474 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6475 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6476 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6477 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6478 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6479 if (target)
6480 return target;
6481 break;
6482
6483 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6484 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6485 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6486 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6487 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6488 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6489 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6490 if (target)
6491 return target;
6492 break;
6493
6494 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6495 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6496 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6497 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6498 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6499 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6500 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6501 if (target)
6502 return target;
6503 break;
6504
6505 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6506 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6507 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6508 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6509 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6510 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6511 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6512 if (target)
6513 return target;
6514 break;
6515
6516 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6517 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6518 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6519 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6520 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6521 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6522 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6523 if (target)
6524 return target;
6525 break;
6526
6527 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6528 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6529 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6530 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6531 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6532 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6533 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6534 if (target)
6535 return target;
6536 break;
6537
6538 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6539 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6540 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6541 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6542 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6543 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6544 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6545 if (target)
6546 return target;
6547 break;
6548
6549 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6550 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6551 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6552 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6553 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6554 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6555 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6556 if (target)
6557 return target;
6558 break;
6559
6560 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6561 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6562 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6563 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6564 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6565 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6566 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6567 if (target)
6568 return target;
6569 break;
6570
6571 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6572 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6573 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6574 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6575 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6576 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6577 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6578 if (target)
6579 return target;
6580 break;
6581
6582 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6583 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6584 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6585 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6586 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6587 if (mode == VOIDmode)
6588 mode = TYPE_MODE (boolean_type_node);
6589 if (!target || !register_operand (target, mode))
6590 target = gen_reg_rtx (mode);
6591
6592 mode = get_builtin_sync_mode
6593 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6594 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6595 if (target)
6596 return target;
6597 break;
6598
6599 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6600 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6601 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6602 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6603 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6604 mode = get_builtin_sync_mode
6605 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6606 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6607 if (target)
6608 return target;
6609 break;
6610
6611 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6612 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6613 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6614 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6615 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6616 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6617 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6618 if (target)
6619 return target;
6620 break;
6621
6622 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6623 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6624 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6625 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6626 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6627 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6628 expand_builtin_sync_lock_release (mode, exp);
6629 return const0_rtx;
6630
6631 case BUILT_IN_SYNC_SYNCHRONIZE:
6632 expand_builtin_sync_synchronize ();
6633 return const0_rtx;
6634
6635 case BUILT_IN_ATOMIC_EXCHANGE_1:
6636 case BUILT_IN_ATOMIC_EXCHANGE_2:
6637 case BUILT_IN_ATOMIC_EXCHANGE_4:
6638 case BUILT_IN_ATOMIC_EXCHANGE_8:
6639 case BUILT_IN_ATOMIC_EXCHANGE_16:
6640 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6641 target = expand_builtin_atomic_exchange (mode, exp, target);
6642 if (target)
6643 return target;
6644 break;
6645
6646 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6647 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6648 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6649 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6650 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6651 {
6652 unsigned int nargs, z;
6653 vec<tree, va_gc> *vec;
6654
6655 mode =
6656 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6657 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6658 if (target)
6659 return target;
6660
6661 /* If this is turned into an external library call, the weak parameter
6662 must be dropped to match the expected parameter list. */
6663 nargs = call_expr_nargs (exp);
6664 vec_alloc (vec, nargs - 1);
6665 for (z = 0; z < 3; z++)
6666 vec->quick_push (CALL_EXPR_ARG (exp, z));
6667 /* Skip the boolean weak parameter. */
6668 for (z = 4; z < 6; z++)
6669 vec->quick_push (CALL_EXPR_ARG (exp, z));
6670 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6671 break;
6672 }
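/* Illustration: the external fallback generated from the rebuilt call
   vector matches the libatomic entry point, e.g. for 4 bytes roughly
     bool __atomic_compare_exchange_4 (void *mem, void *expect,
                                       unsigned int desired,
                                       int success, int failure);
   i.e. the original argument list minus the boolean WEAK flag. */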
6673
6674 case BUILT_IN_ATOMIC_LOAD_1:
6675 case BUILT_IN_ATOMIC_LOAD_2:
6676 case BUILT_IN_ATOMIC_LOAD_4:
6677 case BUILT_IN_ATOMIC_LOAD_8:
6678 case BUILT_IN_ATOMIC_LOAD_16:
6679 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6680 target = expand_builtin_atomic_load (mode, exp, target);
6681 if (target)
6682 return target;
6683 break;
6684
6685 case BUILT_IN_ATOMIC_STORE_1:
6686 case BUILT_IN_ATOMIC_STORE_2:
6687 case BUILT_IN_ATOMIC_STORE_4:
6688 case BUILT_IN_ATOMIC_STORE_8:
6689 case BUILT_IN_ATOMIC_STORE_16:
6690 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6691 target = expand_builtin_atomic_store (mode, exp);
6692 if (target)
6693 return const0_rtx;
6694 break;
6695
6696 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6697 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6698 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6699 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6700 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6701 {
6702 enum built_in_function lib;
6703 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6704 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6705 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6706 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6707 ignore, lib);
6708 if (target)
6709 return target;
6710 break;
6711 }
6712 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6713 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6714 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6715 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6716 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6717 {
6718 enum built_in_function lib;
6719 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6720 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6721 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6722 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6723 ignore, lib);
6724 if (target)
6725 return target;
6726 break;
6727 }
6728 case BUILT_IN_ATOMIC_AND_FETCH_1:
6729 case BUILT_IN_ATOMIC_AND_FETCH_2:
6730 case BUILT_IN_ATOMIC_AND_FETCH_4:
6731 case BUILT_IN_ATOMIC_AND_FETCH_8:
6732 case BUILT_IN_ATOMIC_AND_FETCH_16:
6733 {
6734 enum built_in_function lib;
6735 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6736 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6737 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6738 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6739 ignore, lib);
6740 if (target)
6741 return target;
6742 break;
6743 }
6744 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6745 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6746 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6747 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6748 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6749 {
6750 enum built_in_function lib;
6751 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6752 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6753 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6754 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6755 ignore, lib);
6756 if (target)
6757 return target;
6758 break;
6759 }
6760 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6761 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6762 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6763 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6764 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6765 {
6766 enum built_in_function lib;
6767 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6768 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6769 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6770 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6771 ignore, lib);
6772 if (target)
6773 return target;
6774 break;
6775 }
6776 case BUILT_IN_ATOMIC_OR_FETCH_1:
6777 case BUILT_IN_ATOMIC_OR_FETCH_2:
6778 case BUILT_IN_ATOMIC_OR_FETCH_4:
6779 case BUILT_IN_ATOMIC_OR_FETCH_8:
6780 case BUILT_IN_ATOMIC_OR_FETCH_16:
6781 {
6782 enum built_in_function lib;
6783 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6784 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6785 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6786 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6787 ignore, lib);
6788 if (target)
6789 return target;
6790 break;
6791 }
6792 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6793 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6794 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6795 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6796 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6797 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6798 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6799 ignore, BUILT_IN_NONE);
6800 if (target)
6801 return target;
6802 break;
6803
6804 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6805 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6806 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6807 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6808 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6809 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6810 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6811 ignore, BUILT_IN_NONE);
6812 if (target)
6813 return target;
6814 break;
6815
6816 case BUILT_IN_ATOMIC_FETCH_AND_1:
6817 case BUILT_IN_ATOMIC_FETCH_AND_2:
6818 case BUILT_IN_ATOMIC_FETCH_AND_4:
6819 case BUILT_IN_ATOMIC_FETCH_AND_8:
6820 case BUILT_IN_ATOMIC_FETCH_AND_16:
6821 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6822 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6823 ignore, BUILT_IN_NONE);
6824 if (target)
6825 return target;
6826 break;
6827
6828 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6829 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6830 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6831 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6832 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6833 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6834 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6835 ignore, BUILT_IN_NONE);
6836 if (target)
6837 return target;
6838 break;
6839
6840 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6841 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6842 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6843 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6844 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6845 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6846 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6847 ignore, BUILT_IN_NONE);
6848 if (target)
6849 return target;
6850 break;
6851
6852 case BUILT_IN_ATOMIC_FETCH_OR_1:
6853 case BUILT_IN_ATOMIC_FETCH_OR_2:
6854 case BUILT_IN_ATOMIC_FETCH_OR_4:
6855 case BUILT_IN_ATOMIC_FETCH_OR_8:
6856 case BUILT_IN_ATOMIC_FETCH_OR_16:
6857 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6858 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6859 ignore, BUILT_IN_NONE);
6860 if (target)
6861 return target;
6862 break;
6863
6864 case BUILT_IN_ATOMIC_TEST_AND_SET:
6865 return expand_builtin_atomic_test_and_set (exp, target);
6866
6867 case BUILT_IN_ATOMIC_CLEAR:
6868 return expand_builtin_atomic_clear (exp);
6869
6870 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6871 return expand_builtin_atomic_always_lock_free (exp);
6872
6873 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6874 target = expand_builtin_atomic_is_lock_free (exp);
6875 if (target)
6876 return target;
6877 break;
6878
6879 case BUILT_IN_ATOMIC_THREAD_FENCE:
6880 expand_builtin_atomic_thread_fence (exp);
6881 return const0_rtx;
6882
6883 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6884 expand_builtin_atomic_signal_fence (exp);
6885 return const0_rtx;
6886
6887 case BUILT_IN_OBJECT_SIZE:
6888 return expand_builtin_object_size (exp);
6889
6890 case BUILT_IN_MEMCPY_CHK:
6891 case BUILT_IN_MEMPCPY_CHK:
6892 case BUILT_IN_MEMMOVE_CHK:
6893 case BUILT_IN_MEMSET_CHK:
6894 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6895 if (target)
6896 return target;
6897 break;
6898
6899 case BUILT_IN_STRCPY_CHK:
6900 case BUILT_IN_STPCPY_CHK:
6901 case BUILT_IN_STRNCPY_CHK:
6902 case BUILT_IN_STPNCPY_CHK:
6903 case BUILT_IN_STRCAT_CHK:
6904 case BUILT_IN_STRNCAT_CHK:
6905 case BUILT_IN_SNPRINTF_CHK:
6906 case BUILT_IN_VSNPRINTF_CHK:
6907 maybe_emit_chk_warning (exp, fcode);
6908 break;
6909
6910 case BUILT_IN_SPRINTF_CHK:
6911 case BUILT_IN_VSPRINTF_CHK:
6912 maybe_emit_sprintf_chk_warning (exp, fcode);
6913 break;
6914
6915 case BUILT_IN_FREE:
6916 if (warn_free_nonheap_object)
6917 maybe_emit_free_warning (exp);
6918 break;
6919
6920 case BUILT_IN_THREAD_POINTER:
6921 return expand_builtin_thread_pointer (exp, target);
6922
6923 case BUILT_IN_SET_THREAD_POINTER:
6924 expand_builtin_set_thread_pointer (exp);
6925 return const0_rtx;
6926
6927 case BUILT_IN_CILK_DETACH:
6928 expand_builtin_cilk_detach (exp);
6929 return const0_rtx;
6930
6931 case BUILT_IN_CILK_POP_FRAME:
6932 expand_builtin_cilk_pop_frame (exp);
6933 return const0_rtx;
6934
6935 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
6936 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
6937 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
6938 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
6939 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
6940 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
6941 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
6942 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
6943 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
6944 case BUILT_IN_CHKP_GET_PTR_LBOUND:
6945 case BUILT_IN_CHKP_GET_PTR_UBOUND:
6946 /* We allow user CHKP builtins even when Pointer Bounds
6947 Checker is off, expanding them to trivial values. */
6948 if (!chkp_function_instrumented_p (current_function_decl))
6949 {
6950 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
6951 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
6952 || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
6953 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
6954 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
6955 return expand_normal (CALL_EXPR_ARG (exp, 0));
6956 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
6957 return expand_normal (size_zero_node);
6958 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
6959 return expand_normal (size_int (-1));
6960 else
6961 return const0_rtx;
6962 }
6963 /* FALLTHROUGH */
6964
6965 case BUILT_IN_CHKP_BNDMK:
6966 case BUILT_IN_CHKP_BNDSTX:
6967 case BUILT_IN_CHKP_BNDCL:
6968 case BUILT_IN_CHKP_BNDCU:
6969 case BUILT_IN_CHKP_BNDLDX:
6970 case BUILT_IN_CHKP_BNDRET:
6971 case BUILT_IN_CHKP_INTERSECT:
6972 case BUILT_IN_CHKP_NARROW:
6973 case BUILT_IN_CHKP_EXTRACT_LOWER:
6974 case BUILT_IN_CHKP_EXTRACT_UPPER:
6975 /* A software implementation of Pointer Bounds Checker is not yet
6976 implemented; target support is required. */
6977 error ("your target platform does not support -fcheck-pointer-bounds");
6978 break;
6979
6980 case BUILT_IN_ACC_ON_DEVICE:
6981 /* Emit a library call if we failed to expand the builtin when
6982 folding. */
6983 break;
6984
6985 default: /* Just emit a library call for an unknown builtin. */
6986 break;
6987 }
6988
6989 /* The switch statement above can drop through to cause the function
6990 to be called normally. */
6991 return expand_call (exp, target, ignore);
6992 }
6993
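/* Illustrative sketch, not part of the GCC sources: with Pointer Bounds
   Checker instrumentation disabled, the user-visible CHKP builtins
   handled above degenerate to trivial values, e.g.

     __bnd_set_ptr_bounds (p, n)  ->  p
     __bnd_get_ptr_lbound (p)     ->  0
     __bnd_get_ptr_ubound (p)     ->  (size_t) -1

   so uninstrumented code sees every pointer as having maximal bounds.  */
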
6994 /* Similar to expand_builtin but is used for instrumented calls. */
6995
6996 rtx
6997 expand_builtin_with_bounds (tree exp, rtx target,
6998 rtx subtarget ATTRIBUTE_UNUSED,
6999 machine_mode mode, int ignore)
7000 {
7001 tree fndecl = get_callee_fndecl (exp);
7002 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7003
7004 gcc_assert (CALL_WITH_BOUNDS_P (exp));
7005
7006 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7007 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7008
7009 gcc_assert (fcode > BEGIN_CHKP_BUILTINS
7010 && fcode < END_CHKP_BUILTINS);
7011
7012 switch (fcode)
7013 {
7014 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
7015 target = expand_builtin_memcpy_with_bounds (exp, target);
7016 if (target)
7017 return target;
7018 break;
7019
7020 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
7021 target = expand_builtin_mempcpy_with_bounds (exp, target, mode);
7022 if (target)
7023 return target;
7024 break;
7025
7026 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
7027 target = expand_builtin_memset_with_bounds (exp, target, mode);
7028 if (target)
7029 return target;
7030 break;
7031
7032 default:
7033 break;
7034 }
7035
7036 /* The switch statement above can drop through to cause the function
7037 to be called normally. */
7038 return expand_call (exp, target, ignore);
7039 }
7040
7041 /* Determine whether a tree node represents a call to a built-in
7042 function. If the tree T is a call to a built-in function with
7043 the right number of arguments of the appropriate types, return
7044 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7045 Otherwise the return value is END_BUILTINS. */
7046
7047 enum built_in_function
7048 builtin_mathfn_code (const_tree t)
7049 {
7050 const_tree fndecl, arg, parmlist;
7051 const_tree argtype, parmtype;
7052 const_call_expr_arg_iterator iter;
7053
7054 if (TREE_CODE (t) != CALL_EXPR
7055 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7056 return END_BUILTINS;
7057
7058 fndecl = get_callee_fndecl (t);
7059 if (fndecl == NULL_TREE
7060 || TREE_CODE (fndecl) != FUNCTION_DECL
7061 || ! DECL_BUILT_IN (fndecl)
7062 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7063 return END_BUILTINS;
7064
7065 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7066 init_const_call_expr_arg_iterator (t, &iter);
7067 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7068 {
7069 /* If a function doesn't take a variable number of arguments,
7070 the last element in the list will have type `void'. */
7071 parmtype = TREE_VALUE (parmlist);
7072 if (VOID_TYPE_P (parmtype))
7073 {
7074 if (more_const_call_expr_args_p (&iter))
7075 return END_BUILTINS;
7076 return DECL_FUNCTION_CODE (fndecl);
7077 }
7078
7079 if (! more_const_call_expr_args_p (&iter))
7080 return END_BUILTINS;
7081
7082 arg = next_const_call_expr_arg (&iter);
7083 argtype = TREE_TYPE (arg);
7084
7085 if (SCALAR_FLOAT_TYPE_P (parmtype))
7086 {
7087 if (! SCALAR_FLOAT_TYPE_P (argtype))
7088 return END_BUILTINS;
7089 }
7090 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7091 {
7092 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7093 return END_BUILTINS;
7094 }
7095 else if (POINTER_TYPE_P (parmtype))
7096 {
7097 if (! POINTER_TYPE_P (argtype))
7098 return END_BUILTINS;
7099 }
7100 else if (INTEGRAL_TYPE_P (parmtype))
7101 {
7102 if (! INTEGRAL_TYPE_P (argtype))
7103 return END_BUILTINS;
7104 }
7105 else
7106 return END_BUILTINS;
7107 }
7108
7109 /* Variable-length argument list. */
7110 return DECL_FUNCTION_CODE (fndecl);
7111 }
7112
7113 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7114 evaluate to a constant. */
7115
7116 static tree
7117 fold_builtin_constant_p (tree arg)
7118 {
7119 /* We return 1 for a numeric type that's known to be a constant
7120 value at compile-time or for an aggregate type that's a
7121 literal constant. */
7122 STRIP_NOPS (arg);
7123
7124 /* If we know this is a constant, return the constant 1. */
7125 if (CONSTANT_CLASS_P (arg)
7126 || (TREE_CODE (arg) == CONSTRUCTOR
7127 && TREE_CONSTANT (arg)))
7128 return integer_one_node;
7129 if (TREE_CODE (arg) == ADDR_EXPR)
7130 {
7131 tree op = TREE_OPERAND (arg, 0);
7132 if (TREE_CODE (op) == STRING_CST
7133 || (TREE_CODE (op) == ARRAY_REF
7134 && integer_zerop (TREE_OPERAND (op, 1))
7135 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7136 return integer_one_node;
7137 }
7138
7139 /* If this expression has side effects, show we don't know it to be a
7140 constant. Likewise if it's a pointer or aggregate type, since in
7141 those cases we only want literals; those are only optimized
7142 when generating RTL, not later.
7143 And finally, if we are compiling an initializer, not code, we
7144 need to return a definite result now; there's not going to be any
7145 more optimization done. */
7146 if (TREE_SIDE_EFFECTS (arg)
7147 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7148 || POINTER_TYPE_P (TREE_TYPE (arg))
7149 || cfun == 0
7150 || folding_initializer
7151 || force_folding_builtin_constant_p)
7152 return integer_zero_node;
7153
7154 return NULL_TREE;
7155 }
7156
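/* Illustrative examples for the folding above (hypothetical user code,
   not part of GCC):

     __builtin_constant_p (42)     ->  1   (a CONSTANT_CLASS_P node)
     __builtin_constant_p ("abc")  ->  1   (ADDR_EXPR of a STRING_CST)
     __builtin_constant_p (i++)    ->  0   (TREE_SIDE_EFFECTS)

   Everything else yields NULL_TREE so the question can be asked again
   after later optimizations.  */
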
7157 /* Create a call to builtin_expect with PRED, EXPECTED and the optional
7158 PREDICTOR as its arguments and return the result as a truthvalue. */
7159
7160 static tree
7161 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7162 tree predictor)
7163 {
7164 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7165
7166 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7167 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7168 ret_type = TREE_TYPE (TREE_TYPE (fn));
7169 pred_type = TREE_VALUE (arg_types);
7170 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7171
7172 pred = fold_convert_loc (loc, pred_type, pred);
7173 expected = fold_convert_loc (loc, expected_type, expected);
7174 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7175 predictor);
7176
7177 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7178 build_int_cst (ret_type, 0));
7179 }
7180
7181 /* Fold a call to builtin_expect with arguments ARG0, ARG1 and the
7182 optional predictor ARG2. Return NULL_TREE if no simplification is possible. */
7183
7184 tree
7185 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
7186 {
7187 tree inner, fndecl, inner_arg0;
7188 enum tree_code code;
7189
7190 /* Distribute the expected value over short-circuiting operators.
7191 See through the cast from truthvalue_type_node to long. */
7192 inner_arg0 = arg0;
7193 while (CONVERT_EXPR_P (inner_arg0)
7194 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7195 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7196 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7197
7198 /* If this is a builtin_expect within a builtin_expect, keep the
7199 inner one. See through a comparison against a constant, which
7200 might have been added to create a truthvalue. */
7201 inner = inner_arg0;
7202
7203 if (COMPARISON_CLASS_P (inner)
7204 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7205 inner = TREE_OPERAND (inner, 0);
7206
7207 if (TREE_CODE (inner) == CALL_EXPR
7208 && (fndecl = get_callee_fndecl (inner))
7209 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7210 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7211 return arg0;
7212
7213 inner = inner_arg0;
7214 code = TREE_CODE (inner);
7215 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7216 {
7217 tree op0 = TREE_OPERAND (inner, 0);
7218 tree op1 = TREE_OPERAND (inner, 1);
7219
7220 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7221 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
7222 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7223
7224 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7225 }
7226
7227 /* If the argument isn't invariant then there's nothing else we can do. */
7228 if (!TREE_CONSTANT (inner_arg0))
7229 return NULL_TREE;
7230
7231 /* If we expect that a comparison against the argument will fold to
7232 a constant return the constant. In practice, this means a true
7233 constant or the address of a non-weak symbol. */
7234 inner = inner_arg0;
7235 STRIP_NOPS (inner);
7236 if (TREE_CODE (inner) == ADDR_EXPR)
7237 {
7238 do
7239 {
7240 inner = TREE_OPERAND (inner, 0);
7241 }
7242 while (TREE_CODE (inner) == COMPONENT_REF
7243 || TREE_CODE (inner) == ARRAY_REF);
7244 if ((TREE_CODE (inner) == VAR_DECL
7245 || TREE_CODE (inner) == FUNCTION_DECL)
7246 && DECL_WEAK (inner))
7247 return NULL_TREE;
7248 }
7249
7250 /* Otherwise, ARG0 already has the proper type for the return value. */
7251 return arg0;
7252 }
7253
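/* A sketch of the distribution above over TRUTH_ANDIF_EXPR (at the
   source level, ignoring the conversions to and from long):

     __builtin_expect (a && b, 1)

   becomes

     (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)

   so each short-circuit arm carries its own prediction.  */
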
7254 /* Fold a call to __builtin_classify_type with argument ARG. */
7255
7256 static tree
7257 fold_builtin_classify_type (tree arg)
7258 {
7259 if (arg == 0)
7260 return build_int_cst (integer_type_node, no_type_class);
7261
7262 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7263 }
7264
7265 /* Fold a call to __builtin_strlen with argument ARG. */
7266
7267 static tree
7268 fold_builtin_strlen (location_t loc, tree type, tree arg)
7269 {
7270 if (!validate_arg (arg, POINTER_TYPE))
7271 return NULL_TREE;
7272 else
7273 {
7274 tree len = c_strlen (arg, 0);
7275
7276 if (len)
7277 return fold_convert_loc (loc, type, len);
7278
7279 return NULL_TREE;
7280 }
7281 }
7282
7283 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7284
7285 static tree
7286 fold_builtin_inf (location_t loc, tree type, int warn)
7287 {
7288 REAL_VALUE_TYPE real;
7289
7290 /* __builtin_inff is intended to be usable to define INFINITY on all
7291 targets. If an infinity is not available, INFINITY expands "to a
7292 positive constant of type float that overflows at translation
7293 time", footnote "In this case, using INFINITY will violate the
7294 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7295 Thus we pedwarn to ensure this constraint violation is
7296 diagnosed. */
7297 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7298 pedwarn (loc, 0, "target format does not support infinity");
7299
7300 real_inf (&real);
7301 return build_real (type, real);
7302 }
7303
7304 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7305
7306 static tree
7307 fold_builtin_nan (tree arg, tree type, int quiet)
7308 {
7309 REAL_VALUE_TYPE real;
7310 const char *str;
7311
7312 if (!validate_arg (arg, POINTER_TYPE))
7313 return NULL_TREE;
7314 str = c_getstr (arg);
7315 if (!str)
7316 return NULL_TREE;
7317
7318 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7319 return NULL_TREE;
7320
7321 return build_real (type, real);
7322 }
7323
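/* Examples of the constant folds above (a sketch; the exact bit
   patterns are target format dependent):

     __builtin_inf ()     ->  a +Inf REAL_CST (pedwarn first if the
                              target format has no infinities)
     __builtin_nan ("")   ->  a quiet NaN REAL_CST
     __builtin_nans ("")  ->  a signalling NaN REAL_CST

   A non-literal string argument defeats c_getstr and leaves the call
   alone.  */
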
7324 /* Return true if the floating point expression T has an integer value.
7325 We also allow +Inf, -Inf and NaN to be considered integer values. */
7326
7327 static bool
7328 integer_valued_real_p (tree t)
7329 {
7330 switch (TREE_CODE (t))
7331 {
7332 case FLOAT_EXPR:
7333 return true;
7334
7335 case ABS_EXPR:
7336 case SAVE_EXPR:
7337 return integer_valued_real_p (TREE_OPERAND (t, 0));
7338
7339 case COMPOUND_EXPR:
7340 case MODIFY_EXPR:
7341 case BIND_EXPR:
7342 return integer_valued_real_p (TREE_OPERAND (t, 1));
7343
7344 case PLUS_EXPR:
7345 case MINUS_EXPR:
7346 case MULT_EXPR:
7347 case MIN_EXPR:
7348 case MAX_EXPR:
7349 return integer_valued_real_p (TREE_OPERAND (t, 0))
7350 && integer_valued_real_p (TREE_OPERAND (t, 1));
7351
7352 case COND_EXPR:
7353 return integer_valued_real_p (TREE_OPERAND (t, 1))
7354 && integer_valued_real_p (TREE_OPERAND (t, 2));
7355
7356 case REAL_CST:
7357 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7358
7359 CASE_CONVERT:
7360 {
7361 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7362 if (TREE_CODE (type) == INTEGER_TYPE)
7363 return true;
7364 if (TREE_CODE (type) == REAL_TYPE)
7365 return integer_valued_real_p (TREE_OPERAND (t, 0));
7366 break;
7367 }
7368
7369 case CALL_EXPR:
7370 switch (builtin_mathfn_code (t))
7371 {
7372 CASE_FLT_FN (BUILT_IN_CEIL):
7373 CASE_FLT_FN (BUILT_IN_FLOOR):
7374 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7375 CASE_FLT_FN (BUILT_IN_RINT):
7376 CASE_FLT_FN (BUILT_IN_ROUND):
7377 CASE_FLT_FN (BUILT_IN_TRUNC):
7378 return true;
7379
7380 CASE_FLT_FN (BUILT_IN_FMIN):
7381 CASE_FLT_FN (BUILT_IN_FMAX):
7382 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7383 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7384
7385 default:
7386 break;
7387 }
7388 break;
7389
7390 default:
7391 break;
7392 }
7393 return false;
7394 }
7395
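/* Expressions the predicate above recognizes as integer valued
   (a sketch):

     (double) i               -- FLOAT_EXPR from an integer
     floor (x) + ceil (y)     -- PLUS_EXPR of rounding builtins
     fmin (trunc (x), 2.0)    -- 2.0 is an integral REAL_CST

   whereas e.g. x * 0.5 is rejected, since neither operand is known
   to be integer valued.  */
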
7396 /* FNDECL is assumed to be a builtin where truncation can be propagated
7397 across (for instance floor((double)f) == (double)floorf (f)).
7398 Do the transformation for a call with argument ARG. */
7399
7400 static tree
7401 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
7402 {
7403 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7404
7405 if (!validate_arg (arg, REAL_TYPE))
7406 return NULL_TREE;
7407
7408 /* Integer rounding functions are idempotent. */
7409 if (fcode == builtin_mathfn_code (arg))
7410 return arg;
7411
7412 /* If the argument is already integer valued, and we don't need to worry
7413 about setting errno, there's no need to perform rounding. */
7414 if (! flag_errno_math && integer_valued_real_p (arg))
7415 return arg;
7416
7417 if (optimize)
7418 {
7419 tree arg0 = strip_float_extensions (arg);
7420 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7421 tree newtype = TREE_TYPE (arg0);
7422 tree decl;
7423
7424 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7425 && (decl = mathfn_built_in (newtype, fcode)))
7426 return fold_convert_loc (loc, ftype,
7427 build_call_expr_loc (loc, decl, 1,
7428 fold_convert_loc (loc,
7429 newtype,
7430 arg0)));
7431 }
7432 return NULL_TREE;
7433 }
7434
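/* Source-level sketch of the folds above, for a float F (assuming
   floorf is available via mathfn_built_in):

     floor (floor (x))   ->  floor (x)            (idempotence)
     floor ((double) f)  ->  (double) floorf (f)  (narrowing)  */
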
7435 /* FNDECL is assumed to be a builtin which can narrow the FP type of
7436 the argument, for instance lround((double)f) -> lroundf (f).
7437 Do the transformation for a call with argument ARG. */
7438
7439 static tree
7440 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
7441 {
7442 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7443
7444 if (!validate_arg (arg, REAL_TYPE))
7445 return NULL_TREE;
7446
7447 /* If the argument is already integer valued, and we don't need to worry
7448 about setting errno, there's no need to perform rounding. */
7449 if (! flag_errno_math && integer_valued_real_p (arg))
7450 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7451 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7452
7453 if (optimize)
7454 {
7455 tree ftype = TREE_TYPE (arg);
7456 tree arg0 = strip_float_extensions (arg);
7457 tree newtype = TREE_TYPE (arg0);
7458 tree decl;
7459
7460 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7461 && (decl = mathfn_built_in (newtype, fcode)))
7462 return build_call_expr_loc (loc, decl, 1,
7463 fold_convert_loc (loc, newtype, arg0));
7464 }
7465
7466 /* Canonicalize iround (x) to lround (x) on ILP32 targets where
7467 sizeof (int) == sizeof (long). */
7468 if (TYPE_PRECISION (integer_type_node)
7469 == TYPE_PRECISION (long_integer_type_node))
7470 {
7471 tree newfn = NULL_TREE;
7472 switch (fcode)
7473 {
7474 CASE_FLT_FN (BUILT_IN_ICEIL):
7475 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7476 break;
7477
7478 CASE_FLT_FN (BUILT_IN_IFLOOR):
7479 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7480 break;
7481
7482 CASE_FLT_FN (BUILT_IN_IROUND):
7483 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7484 break;
7485
7486 CASE_FLT_FN (BUILT_IN_IRINT):
7487 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7488 break;
7489
7490 default:
7491 break;
7492 }
7493
7494 if (newfn)
7495 {
7496 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7497 return fold_convert_loc (loc,
7498 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7499 }
7500 }
7501
7502 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7503 sizeof (long long) == sizeof (long). */
7504 if (TYPE_PRECISION (long_long_integer_type_node)
7505 == TYPE_PRECISION (long_integer_type_node))
7506 {
7507 tree newfn = NULL_TREE;
7508 switch (fcode)
7509 {
7510 CASE_FLT_FN (BUILT_IN_LLCEIL):
7511 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7512 break;
7513
7514 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7515 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7516 break;
7517
7518 CASE_FLT_FN (BUILT_IN_LLROUND):
7519 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7520 break;
7521
7522 CASE_FLT_FN (BUILT_IN_LLRINT):
7523 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7524 break;
7525
7526 default:
7527 break;
7528 }
7529
7530 if (newfn)
7531 {
7532 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7533 return fold_convert_loc (loc,
7534 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7535 }
7536 }
7537
7538 return NULL_TREE;
7539 }
7540
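/* Source-level sketch of the folds above, for a float F:

     lround ((double) f)  ->  lroundf (f)
     iround (x)           ->  (int) lround (x)        if int == long
     llround (x)          ->  (long long) lround (x)  if long long == long

   The last two merely canonicalize on the "l" entry points.  */
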
7541 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7542 complex tree type of the result. If NEG is true, the imaginary
7543 zero is negative. */
7544
7545 static tree
7546 build_complex_cproj (tree type, bool neg)
7547 {
7548 REAL_VALUE_TYPE rinf, rzero = dconst0;
7549
7550 real_inf (&rinf);
7551 rzero.sign = neg;
7552 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7553 build_real (TREE_TYPE (type), rzero));
7554 }
7555
7556 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7557 return type. Return NULL_TREE if no simplification can be made. */
7558
7559 static tree
7560 fold_builtin_cproj (location_t loc, tree arg, tree type)
7561 {
7562 if (!validate_arg (arg, COMPLEX_TYPE)
7563 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7564 return NULL_TREE;
7565
7566 /* If there are no infinities, return arg. */
7567 if (! HONOR_INFINITIES (type))
7568 return non_lvalue_loc (loc, arg);
7569
7570 /* Calculate the result when the argument is a constant. */
7571 if (TREE_CODE (arg) == COMPLEX_CST)
7572 {
7573 const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
7574 const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
7575
7576 if (real_isinf (real) || real_isinf (imag))
7577 return build_complex_cproj (type, imag->sign);
7578 else
7579 return arg;
7580 }
7581
7582 return NULL_TREE;
7583 }
7584
7585 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7586 Return NULL_TREE if no simplification can be made. */
7587
7588 static tree
7589 fold_builtin_tan (tree arg, tree type)
7590 {
7591 enum built_in_function fcode;
7592 tree res;
7593
7594 if (!validate_arg (arg, REAL_TYPE))
7595 return NULL_TREE;
7596
7597 /* Calculate the result when the argument is a constant. */
7598 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7599 return res;
7600
7601 /* Optimize tan(atan(x)) = x. */
7602 fcode = builtin_mathfn_code (arg);
7603 if (flag_unsafe_math_optimizations
7604 && (fcode == BUILT_IN_ATAN
7605 || fcode == BUILT_IN_ATANF
7606 || fcode == BUILT_IN_ATANL))
7607 return CALL_EXPR_ARG (arg, 0);
7608
7609 return NULL_TREE;
7610 }
7611
7612 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7613 NULL_TREE if no simplification can be made. */
7614
7615 static tree
7616 fold_builtin_sincos (location_t loc,
7617 tree arg0, tree arg1, tree arg2)
7618 {
7619 tree type;
7620 tree res, fn, call;
7621
7622 if (!validate_arg (arg0, REAL_TYPE)
7623 || !validate_arg (arg1, POINTER_TYPE)
7624 || !validate_arg (arg2, POINTER_TYPE))
7625 return NULL_TREE;
7626
7627 type = TREE_TYPE (arg0);
7628
7629 /* Calculate the result when the argument is a constant. */
7630 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7631 return res;
7632
7633 /* Canonicalize sincos to cexpi. */
7634 if (!targetm.libc_has_function (function_c99_math_complex))
7635 return NULL_TREE;
7636 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7637 if (!fn)
7638 return NULL_TREE;
7639
7640 call = build_call_expr_loc (loc, fn, 1, arg0);
7641 call = builtin_save_expr (call);
7642
7643 return build2 (COMPOUND_EXPR, void_type_node,
7644 build2 (MODIFY_EXPR, void_type_node,
7645 build_fold_indirect_ref_loc (loc, arg1),
7646 build1 (IMAGPART_EXPR, type, call)),
7647 build2 (MODIFY_EXPR, void_type_node,
7648 build_fold_indirect_ref_loc (loc, arg2),
7649 build1 (REALPART_EXPR, type, call)));
7650 }
7651
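/* The cexpi canonicalization above turns (a sketch, assuming the C99
   complex runtime is available):

     sincos (x, &s, &c);

   into the equivalent of

     _Complex double t = __builtin_cexpi (x);
     s = __imag__ t;
     c = __real__ t;

   which lets CSE share one cexpi computation between sin and cos.  */
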
7652 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7653 NULL_TREE if no simplification can be made. */
7654
7655 static tree
7656 fold_builtin_cexp (location_t loc, tree arg0, tree type)
7657 {
7658 tree rtype;
7659 tree realp, imagp, ifn;
7660 tree res;
7661
7662 if (!validate_arg (arg0, COMPLEX_TYPE)
7663 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
7664 return NULL_TREE;
7665
7666 /* Calculate the result when the argument is a constant. */
7667 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
7668 return res;
7669
7670 rtype = TREE_TYPE (TREE_TYPE (arg0));
7671
7672 /* If we can determine the real part of arg0 and it is constant zero,
7673 fold to cexpi. */
7674 if (!targetm.libc_has_function (function_c99_math_complex))
7675 return NULL_TREE;
7676 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7677 if (!ifn)
7678 return NULL_TREE;
7679
7680 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
7681 && real_zerop (realp))
7682 {
7683 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
7684 return build_call_expr_loc (loc, ifn, 1, narg);
7685 }
7686
7687 /* If we can easily decompose the real and imaginary parts, split cexp
7688 into exp (r) * cexpi (i). */
7689 if (flag_unsafe_math_optimizations
7690 && realp)
7691 {
7692 tree rfn, rcall, icall;
7693
7694 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7695 if (!rfn)
7696 return NULL_TREE;
7697
7698 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
7699 if (!imagp)
7700 return NULL_TREE;
7701
7702 icall = build_call_expr_loc (loc, ifn, 1, imagp);
7703 icall = builtin_save_expr (icall);
7704 rcall = build_call_expr_loc (loc, rfn, 1, realp);
7705 rcall = builtin_save_expr (rcall);
7706 return fold_build2_loc (loc, COMPLEX_EXPR, type,
7707 fold_build2_loc (loc, MULT_EXPR, rtype,
7708 rcall,
7709 fold_build1_loc (loc, REALPART_EXPR,
7710 rtype, icall)),
7711 fold_build2_loc (loc, MULT_EXPR, rtype,
7712 rcall,
7713 fold_build1_loc (loc, IMAGPART_EXPR,
7714 rtype, icall)));
7715 }
7716
7717 return NULL_TREE;
7718 }
7719
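/* Sketch of the cexp folds above (C99 complex runtime assumed; the
   second form needs -funsafe-math-optimizations):

     cexp (0.0 + y * 1.0i)  ->  __builtin_cexpi (y)
     cexp (x + y * 1.0i)    ->  exp (x) * __builtin_cexpi (y)

   where __builtin_cexpi (y) stands for cos (y) + sin (y) * 1.0i.  */
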
7720 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7721 Return NULL_TREE if no simplification can be made. */
7722
7723 static tree
7724 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7725 {
7726 if (!validate_arg (arg, REAL_TYPE))
7727 return NULL_TREE;
7728
7729 /* Optimize trunc of constant value. */
7730 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7731 {
7732 REAL_VALUE_TYPE r, x;
7733 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7734
7735 x = TREE_REAL_CST (arg);
7736 real_trunc (&r, TYPE_MODE (type), &x);
7737 return build_real (type, r);
7738 }
7739
7740 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7741 }
7742
7743 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7744 Return NULL_TREE if no simplification can be made. */
7745
7746 static tree
7747 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7748 {
7749 if (!validate_arg (arg, REAL_TYPE))
7750 return NULL_TREE;
7751
7752 /* Optimize floor of constant value. */
7753 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7754 {
7755 REAL_VALUE_TYPE x;
7756
7757 x = TREE_REAL_CST (arg);
7758 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7759 {
7760 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7761 REAL_VALUE_TYPE r;
7762
7763 real_floor (&r, TYPE_MODE (type), &x);
7764 return build_real (type, r);
7765 }
7766 }
7767
7768 /* Fold floor (x) where x is nonnegative to trunc (x). */
7769 if (tree_expr_nonnegative_p (arg))
7770 {
7771 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7772 if (truncfn)
7773 return build_call_expr_loc (loc, truncfn, 1, arg);
7774 }
7775
7776 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7777 }
7778
7779 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7780 Return NULL_TREE if no simplification can be made. */
7781
7782 static tree
7783 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7784 {
7785 if (!validate_arg (arg, REAL_TYPE))
7786 return NULL_TREE;
7787
7788 /* Optimize ceil of constant value. */
7789 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7790 {
7791 REAL_VALUE_TYPE x;
7792
7793 x = TREE_REAL_CST (arg);
7794 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7795 {
7796 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7797 REAL_VALUE_TYPE r;
7798
7799 real_ceil (&r, TYPE_MODE (type), &x);
7800 return build_real (type, r);
7801 }
7802 }
7803
7804 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7805 }
7806
7807 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7808 Return NULL_TREE if no simplification can be made. */
7809
7810 static tree
7811 fold_builtin_round (location_t loc, tree fndecl, tree arg)
7812 {
7813 if (!validate_arg (arg, REAL_TYPE))
7814 return NULL_TREE;
7815
7816 /* Optimize round of constant value. */
7817 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7818 {
7819 REAL_VALUE_TYPE x;
7820
7821 x = TREE_REAL_CST (arg);
7822 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7823 {
7824 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7825 REAL_VALUE_TYPE r;
7826
7827 real_round (&r, TYPE_MODE (type), &x);
7828 return build_real (type, r);
7829 }
7830 }
7831
7832 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7833 }
7834
7835 /* Fold function call to builtin lround, lroundf or lroundl (or the
7836 corresponding long long versions) and other rounding functions. ARG
7837 is the argument to the call. Return NULL_TREE if no simplification
7838 can be made. */
7839
7840 static tree
7841 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
7842 {
7843 if (!validate_arg (arg, REAL_TYPE))
7844 return NULL_TREE;
7845
7846 /* Optimize lround of constant value. */
7847 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7848 {
7849 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
7850
7851 if (real_isfinite (&x))
7852 {
7853 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
7854 tree ftype = TREE_TYPE (arg);
7855 REAL_VALUE_TYPE r;
7856 bool fail = false;
7857
7858 switch (DECL_FUNCTION_CODE (fndecl))
7859 {
7860 CASE_FLT_FN (BUILT_IN_IFLOOR):
7861 CASE_FLT_FN (BUILT_IN_LFLOOR):
7862 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7863 real_floor (&r, TYPE_MODE (ftype), &x);
7864 break;
7865
7866 CASE_FLT_FN (BUILT_IN_ICEIL):
7867 CASE_FLT_FN (BUILT_IN_LCEIL):
7868 CASE_FLT_FN (BUILT_IN_LLCEIL):
7869 real_ceil (&r, TYPE_MODE (ftype), &x);
7870 break;
7871
7872 CASE_FLT_FN (BUILT_IN_IROUND):
7873 CASE_FLT_FN (BUILT_IN_LROUND):
7874 CASE_FLT_FN (BUILT_IN_LLROUND):
7875 real_round (&r, TYPE_MODE (ftype), &x);
7876 break;
7877
7878 default:
7879 gcc_unreachable ();
7880 }
7881
7882 wide_int val = real_to_integer (&r, &fail, TYPE_PRECISION (itype));
7883 if (!fail)
7884 return wide_int_to_tree (itype, val);
7885 }
7886 }
7887
7888 switch (DECL_FUNCTION_CODE (fndecl))
7889 {
7890 CASE_FLT_FN (BUILT_IN_LFLOOR):
7891 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7892 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
7893 if (tree_expr_nonnegative_p (arg))
7894 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7895 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7896 break;
7897 default:;
7898 }
7899
7900 return fold_fixed_mathfn (loc, fndecl, arg);
7901 }
7902
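/* Compile-time examples of the rounding folds above (a sketch):

     lround (2.5)  ->  3         (ties round away from zero)
     lceil (2.1)   ->  3
     lfloor (x)    ->  (long) x  for nonnegative x (FIX_TRUNC_EXPR)

   A constant whose rounded value does not fit the integer type sets
   FAIL in real_to_integer and is left for the runtime.  */
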
7903 /* Fold function call to builtin ffs, clz, ctz, clrsb, popcount and parity
7904 and their long and long long variants (e.g. ffsl and ffsll). ARG is
7905 the argument to the call. Return NULL_TREE if no simplification can
7906 be made. */
7907
7908 static tree
7909 fold_builtin_bitop (tree fndecl, tree arg)
7910 {
7911 if (!validate_arg (arg, INTEGER_TYPE))
7912 return NULL_TREE;
7913
7914 /* Optimize for constant argument. */
7915 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7916 {
7917 tree type = TREE_TYPE (arg);
7918 int result;
7919
7920 switch (DECL_FUNCTION_CODE (fndecl))
7921 {
7922 CASE_INT_FN (BUILT_IN_FFS):
7923 result = wi::ffs (arg);
7924 break;
7925
7926 CASE_INT_FN (BUILT_IN_CLZ):
7927 if (wi::ne_p (arg, 0))
7928 result = wi::clz (arg);
7929 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7930 result = TYPE_PRECISION (type);
7931 break;
7932
7933 CASE_INT_FN (BUILT_IN_CTZ):
7934 if (wi::ne_p (arg, 0))
7935 result = wi::ctz (arg);
7936 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7937 result = TYPE_PRECISION (type);
7938 break;
7939
7940 CASE_INT_FN (BUILT_IN_CLRSB):
7941 result = wi::clrsb (arg);
7942 break;
7943
7944 CASE_INT_FN (BUILT_IN_POPCOUNT):
7945 result = wi::popcount (arg);
7946 break;
7947
7948 CASE_INT_FN (BUILT_IN_PARITY):
7949 result = wi::parity (arg);
7950 break;
7951
7952 default:
7953 gcc_unreachable ();
7954 }
7955
7956 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
7957 }
7958
7959 return NULL_TREE;
7960 }
7961
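/* Constant-folding examples for the bit builtins above (a sketch,
   assuming a 32-bit int):

     __builtin_ffs (8)          ->  4
     __builtin_clz (1)          ->  31
     __builtin_popcount (0xff)  ->  8
     __builtin_parity (7)       ->  1
     __builtin_ctz (0)          ->  the CTZ_DEFINED_VALUE_AT_ZERO value
                                    if any, else the precision (32)  */
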
7962 /* Fold function call to builtin_bswap and the short, long and long long
7963 variants. Return NULL_TREE if no simplification can be made. */
7964 static tree
7965 fold_builtin_bswap (tree fndecl, tree arg)
7966 {
7967 if (! validate_arg (arg, INTEGER_TYPE))
7968 return NULL_TREE;
7969
7970 /* Optimize constant value. */
7971 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7972 {
7973 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7974
7975 switch (DECL_FUNCTION_CODE (fndecl))
7976 {
7977 case BUILT_IN_BSWAP16:
7978 case BUILT_IN_BSWAP32:
7979 case BUILT_IN_BSWAP64:
7980 {
7981 signop sgn = TYPE_SIGN (type);
7982 tree result =
7983 wide_int_to_tree (type,
7984 wide_int::from (arg, TYPE_PRECISION (type),
7985 sgn).bswap ());
7986 return result;
7987 }
7988 default:
7989 gcc_unreachable ();
7990 }
7991 }
7992
7993 return NULL_TREE;
7994 }
7995
7996 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
7997 NULL_TREE if no simplification can be made. */
7998
7999 static tree
8000 fold_builtin_hypot (location_t loc, tree arg0, tree arg1, tree type)
8001 {
8002 tree res;
8003
8004 if (!validate_arg (arg0, REAL_TYPE)
8005 || !validate_arg (arg1, REAL_TYPE))
8006 return NULL_TREE;
8007
8008 /* Calculate the result when the argument is a constant. */
8009 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8010 return res;
8011
8012 /* If either argument is zero, hypot is fabs of the other. */
8013 if (real_zerop (arg0))
8014 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
8015 else if (real_zerop (arg1))
8016 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
8017
8018 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8019 if (flag_unsafe_math_optimizations
8020 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8021 return fold_build2_loc (loc, MULT_EXPR, type,
8022 fold_build1_loc (loc, ABS_EXPR, type, arg0),
8023 build_real_truncate (type, dconst_sqrt2 ()));
8024
8025 return NULL_TREE;
8026 }
8027
8028
8029 /* Fold a builtin function call to pow, powf, or powl. Return
8030 NULL_TREE if no simplification can be made. */
8031 static tree
8032 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
8033 {
8034 tree res;
8035
8036 if (!validate_arg (arg0, REAL_TYPE)
8037 || !validate_arg (arg1, REAL_TYPE))
8038 return NULL_TREE;
8039
8040 /* Calculate the result when the argument is a constant. */
8041 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8042 return res;
8043
8044 /* Optimize pow(1.0,y) = 1.0. */
8045 if (real_onep (arg0))
8046 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8047
8048 if (TREE_CODE (arg1) == REAL_CST
8049 && !TREE_OVERFLOW (arg1))
8050 {
8051 REAL_VALUE_TYPE cint;
8052 REAL_VALUE_TYPE c;
8053 HOST_WIDE_INT n;
8054
8055 c = TREE_REAL_CST (arg1);
8056
8057 /* Optimize pow(x,0.0) = 1.0. */
8058 if (real_equal (&c, &dconst0))
8059 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8060 arg0);
8061
8062 /* Optimize pow(x,1.0) = x. */
8063 if (real_equal (&c, &dconst1))
8064 return arg0;
8065
8066 /* Optimize pow(x,-1.0) = 1.0/x. */
8067 if (real_equal (&c, &dconstm1))
8068 return fold_build2_loc (loc, RDIV_EXPR, type,
8069 build_real (type, dconst1), arg0);
8070
8071 /* Optimize pow(x,0.5) = sqrt(x). */
8072 if (flag_unsafe_math_optimizations
8073 && real_equal (&c, &dconsthalf))
8074 {
8075 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8076
8077 if (sqrtfn != NULL_TREE)
8078 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
8079 }
8080
8081 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8082 if (flag_unsafe_math_optimizations)
8083 {
8084 const REAL_VALUE_TYPE dconstroot
8085 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8086
8087 if (real_equal (&c, &dconstroot))
8088 {
8089 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8090 if (cbrtfn != NULL_TREE)
8091 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
8092 }
8093 }
8094
8095 /* Check for an integer exponent. */
8096 n = real_to_integer (&c);
8097 real_from_integer (&cint, VOIDmode, n, SIGNED);
8098 if (real_identical (&c, &cint))
8099 {
8100 /* Attempt to evaluate pow at compile-time, unless this should
8101 raise an exception. */
8102 if (TREE_CODE (arg0) == REAL_CST
8103 && !TREE_OVERFLOW (arg0)
8104 && (n > 0
8105 || (!flag_trapping_math && !flag_errno_math)
8106 || !real_equal (&TREE_REAL_CST (arg0), &dconst0)))
8107 {
8108 REAL_VALUE_TYPE x;
8109 bool inexact;
8110
8111 x = TREE_REAL_CST (arg0);
8112 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8113 if (flag_unsafe_math_optimizations || !inexact)
8114 return build_real (type, x);
8115 }
8116 }
8117 }
8118
8119 if (flag_unsafe_math_optimizations)
8120 {
8121 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8122
8123 /* Optimize pow(expN(x),y) = expN(x*y). */
8124 if (BUILTIN_EXPONENT_P (fcode))
8125 {
8126 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8127 tree arg = CALL_EXPR_ARG (arg0, 0);
8128 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
8129 return build_call_expr_loc (loc, expfn, 1, arg);
8130 }
8131
8132 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8133 if (BUILTIN_SQRT_P (fcode))
8134 {
8135 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8136 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8137 build_real (type, dconsthalf));
8138 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
8139 }
8140
8141 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8142 if (BUILTIN_CBRT_P (fcode))
8143 {
8144 tree arg = CALL_EXPR_ARG (arg0, 0);
8145 if (tree_expr_nonnegative_p (arg))
8146 {
8147 tree c = build_real_truncate (type, dconst_third ());
8148 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1, c);
8149 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
8150 }
8151 }
8152
8153 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8154 if (fcode == BUILT_IN_POW
8155 || fcode == BUILT_IN_POWF
8156 || fcode == BUILT_IN_POWL)
8157 {
8158 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8159 if (tree_expr_nonnegative_p (arg00))
8160 {
8161 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8162 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
8163 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
8164 }
8165 }
8166 }
8167
8168 return NULL_TREE;
8169 }
8170
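/* Some of the pow folds above at the source level (a sketch; entries
   marked "unsafe" need -funsafe-math-optimizations):

     pow (x, 1.0)       ->  x
     pow (x, -1.0)      ->  1.0 / x
     pow (x, 0.5)       ->  sqrt (x)           (unsafe)
     pow (exp (x), y)   ->  exp (x * y)        (unsafe)
     pow (sqrt (x), y)  ->  pow (x, y * 0.5)   (unsafe)
     pow (2.0, 10.0)    ->  1024.0             (exact, via real_powi)  */
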
8171 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8172 Return NULL_TREE if no simplification can be made. */
8173 static tree
8174 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8175 tree arg0, tree arg1, tree type)
8176 {
8177 if (!validate_arg (arg0, REAL_TYPE)
8178 || !validate_arg (arg1, INTEGER_TYPE))
8179 return NULL_TREE;
8180
8181 /* Optimize pow(1.0,y) = 1.0. */
8182 if (real_onep (arg0))
8183 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8184
8185 if (tree_fits_shwi_p (arg1))
8186 {
8187 HOST_WIDE_INT c = tree_to_shwi (arg1);
8188
8189 /* Evaluate powi at compile-time. */
8190 if (TREE_CODE (arg0) == REAL_CST
8191 && !TREE_OVERFLOW (arg0))
8192 {
8193 REAL_VALUE_TYPE x;
8194 x = TREE_REAL_CST (arg0);
8195 real_powi (&x, TYPE_MODE (type), &x, c);
8196 return build_real (type, x);
8197 }
8198
8199 /* Optimize pow(x,0) = 1.0. */
8200 if (c == 0)
8201 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8202 arg0);
8203
8204 /* Optimize pow(x,1) = x. */
8205 if (c == 1)
8206 return arg0;
8207
8208 /* Optimize pow(x,-1) = 1.0/x. */
8209 if (c == -1)
8210 return fold_build2_loc (loc, RDIV_EXPR, type,
8211 build_real (type, dconst1), arg0);
8212 }
8213
8214 return NULL_TREE;
8215 }
8216
8217 /* A subroutine of fold_builtin to fold the various exponent
8218 functions. Return NULL_TREE if no simplification can be made.
8219 FUNC is the corresponding MPFR exponent function. */
8220
8221 static tree
8222 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8223 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8224 {
8225 if (validate_arg (arg, REAL_TYPE))
8226 {
8227 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8228 tree res;
8229
8230 /* Calculate the result when the argument is a constant. */
8231 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8232 return res;
8233
8234 /* Optimize expN(logN(x)) = x. */
8235 if (flag_unsafe_math_optimizations)
8236 {
8237 const enum built_in_function fcode = builtin_mathfn_code (arg);
8238
8239 if ((func == mpfr_exp
8240 && (fcode == BUILT_IN_LOG
8241 || fcode == BUILT_IN_LOGF
8242 || fcode == BUILT_IN_LOGL))
8243 || (func == mpfr_exp2
8244 && (fcode == BUILT_IN_LOG2
8245 || fcode == BUILT_IN_LOG2F
8246 || fcode == BUILT_IN_LOG2L))
8247 || (func == mpfr_exp10
8248 && (fcode == BUILT_IN_LOG10
8249 || fcode == BUILT_IN_LOG10F
8250 || fcode == BUILT_IN_LOG10L)))
8251 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8252 }
8253 }
8254
8255 return NULL_TREE;
8256 }
8257
8258 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8259 arguments to the call, and TYPE is its return type.
8260 Return NULL_TREE if no simplification can be made. */
8261
8262 static tree
8263 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
8264 {
8265 if (!validate_arg (arg1, POINTER_TYPE)
8266 || !validate_arg (arg2, INTEGER_TYPE)
8267 || !validate_arg (len, INTEGER_TYPE))
8268 return NULL_TREE;
8269 else
8270 {
8271 const char *p1;
8272
8273 if (TREE_CODE (arg2) != INTEGER_CST
8274 || !tree_fits_uhwi_p (len))
8275 return NULL_TREE;
8276
8277 p1 = c_getstr (arg1);
8278 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8279 {
8280 char c;
8281 const char *r;
8282 tree tem;
8283
8284 if (target_char_cast (arg2, &c))
8285 return NULL_TREE;
8286
8287 r = (const char *) memchr (p1, c, tree_to_uhwi (len));
8288
8289 if (r == NULL)
8290 return build_int_cst (TREE_TYPE (arg1), 0);
8291
8292 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
8293 return fold_convert_loc (loc, type, tem);
8294 }
8295 return NULL_TREE;
8296 }
8297 }
8298
8299 /* Fold function call to builtin memcmp with arguments ARG1, ARG2 and LEN.
8300 Return NULL_TREE if no simplification can be made. */
8301
8302 static tree
8303 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8304 {
8305 const char *p1, *p2;
8306
8307 if (!validate_arg (arg1, POINTER_TYPE)
8308 || !validate_arg (arg2, POINTER_TYPE)
8309 || !validate_arg (len, INTEGER_TYPE))
8310 return NULL_TREE;
8311
8312 /* If the LEN parameter is zero, return zero. */
8313 if (integer_zerop (len))
8314 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8315 arg1, arg2);
8316
8317 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8318 if (operand_equal_p (arg1, arg2, 0))
8319 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8320
8321 p1 = c_getstr (arg1);
8322 p2 = c_getstr (arg2);
8323
8324 /* If all arguments are constant, and the value of len is not greater
8325 than the lengths of arg1 and arg2, evaluate at compile-time. */
8326 if (tree_fits_uhwi_p (len) && p1 && p2
8327 && compare_tree_int (len, strlen (p1) + 1) <= 0
8328 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8329 {
8330 const int r = memcmp (p1, p2, tree_to_uhwi (len));
8331
8332 if (r > 0)
8333 return integer_one_node;
8334 else if (r < 0)
8335 return integer_minus_one_node;
8336 else
8337 return integer_zero_node;
8338 }
8339
8340 /* If the len parameter is one, return an expression corresponding to
8341 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8342 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8343 {
8344 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8345 tree cst_uchar_ptr_node
8346 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8347
8348 tree ind1
8349 = fold_convert_loc (loc, integer_type_node,
8350 build1 (INDIRECT_REF, cst_uchar_node,
8351 fold_convert_loc (loc,
8352 cst_uchar_ptr_node,
8353 arg1)));
8354 tree ind2
8355 = fold_convert_loc (loc, integer_type_node,
8356 build1 (INDIRECT_REF, cst_uchar_node,
8357 fold_convert_loc (loc,
8358 cst_uchar_ptr_node,
8359 arg2)));
8360 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8361 }
8362
8363 return NULL_TREE;
8364 }
8365
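/* Source-level sketches of the memcmp folds above:

     memcmp (p, q, 0)        ->  0   (still evaluating p and q)
     memcmp (p, p, n)        ->  0   (identical non-volatile operands)
     memcmp ("ab", "ac", 2)  ->  -1  (evaluated at compile time)
     memcmp (p, q, 1)        ->  *(const unsigned char *) p
                                 - *(const unsigned char *) q  */
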
8366 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8367 Return NULL_TREE if no simplification can be made. */
8368
8369 static tree
8370 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
8371 {
8372 const char *p1, *p2;
8373
8374 if (!validate_arg (arg1, POINTER_TYPE)
8375 || !validate_arg (arg2, POINTER_TYPE))
8376 return NULL_TREE;
8377
8378 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8379 if (operand_equal_p (arg1, arg2, 0))
8380 return integer_zero_node;
8381
8382 p1 = c_getstr (arg1);
8383 p2 = c_getstr (arg2);
8384
8385 if (p1 && p2)
8386 {
8387 const int i = strcmp (p1, p2);
8388 if (i < 0)
8389 return integer_minus_one_node;
8390 else if (i > 0)
8391 return integer_one_node;
8392 else
8393 return integer_zero_node;
8394 }
8395
8396 /* If the second arg is "", return *(const unsigned char*)arg1. */
8397 if (p2 && *p2 == '\0')
8398 {
8399 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8400 tree cst_uchar_ptr_node
8401 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8402
8403 return fold_convert_loc (loc, integer_type_node,
8404 build1 (INDIRECT_REF, cst_uchar_node,
8405 fold_convert_loc (loc,
8406 cst_uchar_ptr_node,
8407 arg1)));
8408 }
8409
8410 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8411 if (p1 && *p1 == '\0')
8412 {
8413 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8414 tree cst_uchar_ptr_node
8415 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8416
8417 tree temp
8418 = fold_convert_loc (loc, integer_type_node,
8419 build1 (INDIRECT_REF, cst_uchar_node,
8420 fold_convert_loc (loc,
8421 cst_uchar_ptr_node,
8422 arg2)));
8423 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8424 }
8425
8426 return NULL_TREE;
8427 }
8428
8429 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8430 Return NULL_TREE if no simplification can be made. */
8431
8432 static tree
8433 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
8434 {
8435 const char *p1, *p2;
8436
8437 if (!validate_arg (arg1, POINTER_TYPE)
8438 || !validate_arg (arg2, POINTER_TYPE)
8439 || !validate_arg (len, INTEGER_TYPE))
8440 return NULL_TREE;
8441
8442 /* If the LEN parameter is zero, return zero. */
8443 if (integer_zerop (len))
8444 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8445 arg1, arg2);
8446
8447 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8448 if (operand_equal_p (arg1, arg2, 0))
8449 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8450
8451 p1 = c_getstr (arg1);
8452 p2 = c_getstr (arg2);
8453
8454 if (tree_fits_uhwi_p (len) && p1 && p2)
8455 {
8456 const int i = strncmp (p1, p2, tree_to_uhwi (len));
8457 if (i > 0)
8458 return integer_one_node;
8459 else if (i < 0)
8460 return integer_minus_one_node;
8461 else
8462 return integer_zero_node;
8463 }
8464
8465 /* If the second arg is "", and the length is greater than zero,
8466 return *(const unsigned char*)arg1. */
8467 if (p2 && *p2 == '\0'
8468 && TREE_CODE (len) == INTEGER_CST
8469 && tree_int_cst_sgn (len) == 1)
8470 {
8471 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8472 tree cst_uchar_ptr_node
8473 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8474
8475 return fold_convert_loc (loc, integer_type_node,
8476 build1 (INDIRECT_REF, cst_uchar_node,
8477 fold_convert_loc (loc,
8478 cst_uchar_ptr_node,
8479 arg1)));
8480 }
8481
8482 /* If the first arg is "", and the length is greater than zero,
8483 return -*(const unsigned char*)arg2. */
8484 if (p1 && *p1 == '\0'
8485 && TREE_CODE (len) == INTEGER_CST
8486 && tree_int_cst_sgn (len) == 1)
8487 {
8488 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8489 tree cst_uchar_ptr_node
8490 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8491
8492 tree temp = fold_convert_loc (loc, integer_type_node,
8493 build1 (INDIRECT_REF, cst_uchar_node,
8494 fold_convert_loc (loc,
8495 cst_uchar_ptr_node,
8496 arg2)));
8497 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8498 }
8499
8500 /* If the len parameter is one, return an expression corresponding to
8501 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8502 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8503 {
8504 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8505 tree cst_uchar_ptr_node
8506 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8507
8508 tree ind1 = fold_convert_loc (loc, integer_type_node,
8509 build1 (INDIRECT_REF, cst_uchar_node,
8510 fold_convert_loc (loc,
8511 cst_uchar_ptr_node,
8512 arg1)));
8513 tree ind2 = fold_convert_loc (loc, integer_type_node,
8514 build1 (INDIRECT_REF, cst_uchar_node,
8515 fold_convert_loc (loc,
8516 cst_uchar_ptr_node,
8517 arg2)));
8518 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8519 }
8520
8521 return NULL_TREE;
8522 }
8523
8524 /* Fold function call to builtin signbit, signbitf or signbitl with argument
8525 ARG. Return NULL_TREE if no simplification can be made. */
8526
8527 static tree
8528 fold_builtin_signbit (location_t loc, tree arg, tree type)
8529 {
8530 if (!validate_arg (arg, REAL_TYPE))
8531 return NULL_TREE;
8532
8533 /* If ARG is a compile-time constant, determine the result. */
8534 if (TREE_CODE (arg) == REAL_CST
8535 && !TREE_OVERFLOW (arg))
8536 {
8537 REAL_VALUE_TYPE c;
8538
8539 c = TREE_REAL_CST (arg);
8540 return (REAL_VALUE_NEGATIVE (c)
8541 ? build_one_cst (type)
8542 : build_zero_cst (type));
8543 }
8544
8545 /* If ARG is non-negative, the result is always zero. */
8546 if (tree_expr_nonnegative_p (arg))
8547 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8548
8549 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
8550 if (!HONOR_SIGNED_ZEROS (arg))
8551 return fold_convert (type,
8552 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
8553 build_real (TREE_TYPE (arg), dconst0)));
8554
8555 return NULL_TREE;
8556 }
8557
8558 /* Fold function call to builtin copysign, copysignf or copysignl with
8559 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
8560 be made. */
8561
8562 static tree
8563 fold_builtin_copysign (location_t loc, tree arg1, tree arg2, tree type)
8564 {
8565 if (!validate_arg (arg1, REAL_TYPE)
8566 || !validate_arg (arg2, REAL_TYPE))
8567 return NULL_TREE;
8568
8569 /* copysign(X,X) is X. */
8570 if (operand_equal_p (arg1, arg2, 0))
8571 return fold_convert_loc (loc, type, arg1);
8572
8573 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
8574 if (TREE_CODE (arg1) == REAL_CST
8575 && TREE_CODE (arg2) == REAL_CST
8576 && !TREE_OVERFLOW (arg1)
8577 && !TREE_OVERFLOW (arg2))
8578 {
8579 REAL_VALUE_TYPE c1, c2;
8580
8581 c1 = TREE_REAL_CST (arg1);
8582 c2 = TREE_REAL_CST (arg2);
8583 /* c1.sign := c2.sign. */
8584 real_copysign (&c1, &c2);
8585 return build_real (type, c1);
8586 }
8587
8588 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
8589 Remember to evaluate Y for side-effects. */
8590 if (tree_expr_nonnegative_p (arg2))
8591 return omit_one_operand_loc (loc, type,
8592 fold_build1_loc (loc, ABS_EXPR, type, arg1),
8593 arg2);
8594
8595 return NULL_TREE;
8596 }
8597
8598 /* Fold a call to builtin isascii with argument ARG. */
8599
8600 static tree
8601 fold_builtin_isascii (location_t loc, tree arg)
8602 {
8603 if (!validate_arg (arg, INTEGER_TYPE))
8604 return NULL_TREE;
8605 else
8606 {
8607 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8608 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8609 build_int_cst (integer_type_node,
8610 ~ (unsigned HOST_WIDE_INT) 0x7f));
8611 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8612 arg, integer_zero_node);
8613 }
8614 }
8615
8616 /* Fold a call to builtin toascii with argument ARG. */
8617
8618 static tree
8619 fold_builtin_toascii (location_t loc, tree arg)
8620 {
8621 if (!validate_arg (arg, INTEGER_TYPE))
8622 return NULL_TREE;
8623
8624 /* Transform toascii(c) -> (c & 0x7f). */
8625 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8626 build_int_cst (integer_type_node, 0x7f));
8627 }
8628
8629 /* Fold a call to builtin isdigit with argument ARG. */
8630
8631 static tree
8632 fold_builtin_isdigit (location_t loc, tree arg)
8633 {
8634 if (!validate_arg (arg, INTEGER_TYPE))
8635 return NULL_TREE;
8636 else
8637 {
8638 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8639 /* According to the C standard, isdigit is unaffected by locale.
8640 However, it definitely is affected by the target character set. */
8641 unsigned HOST_WIDE_INT target_digit0
8642 = lang_hooks.to_target_charset ('0');
8643
8644 if (target_digit0 == 0)
8645 return NULL_TREE;
8646
8647 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8648 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8649 build_int_cst (unsigned_type_node, target_digit0));
8650 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8651 build_int_cst (unsigned_type_node, 9));
8652 }
8653 }
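
/* Worked example (illustrative only): on a target where '0' has the
   ASCII value 48, the transform above rewrites

     isdigit (c)  ->  (unsigned) c - 48 <= 9

   For c == '7' (55) this yields 7 <= 9, i.e. true; for c == ' ' (32)
   the unsigned subtraction wraps to a huge value, so the comparison
   is false.  A single unsigned compare thus covers both range ends.  */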
8654
8655 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8656
8657 static tree
8658 fold_builtin_fabs (location_t loc, tree arg, tree type)
8659 {
8660 if (!validate_arg (arg, REAL_TYPE))
8661 return NULL_TREE;
8662
8663 arg = fold_convert_loc (loc, type, arg);
8664 if (TREE_CODE (arg) == REAL_CST)
8665 return fold_abs_const (arg, type);
8666 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8667 }
8668
8669 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8670
8671 static tree
8672 fold_builtin_abs (location_t loc, tree arg, tree type)
8673 {
8674 if (!validate_arg (arg, INTEGER_TYPE))
8675 return NULL_TREE;
8676
8677 arg = fold_convert_loc (loc, type, arg);
8678 if (TREE_CODE (arg) == INTEGER_CST)
8679 return fold_abs_const (arg, type);
8680 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8681 }
8682
8683 /* Fold a fma operation with arguments ARG[012]. */
8684
8685 tree
8686 fold_fma (location_t loc ATTRIBUTE_UNUSED,
8687 tree type, tree arg0, tree arg1, tree arg2)
8688 {
8689 if (TREE_CODE (arg0) == REAL_CST
8690 && TREE_CODE (arg1) == REAL_CST
8691 && TREE_CODE (arg2) == REAL_CST)
8692 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
8693
8694 return NULL_TREE;
8695 }
8696
8697 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
8698
8699 static tree
8700 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
8701 {
8702 if (validate_arg (arg0, REAL_TYPE)
8703 && validate_arg (arg1, REAL_TYPE)
8704 && validate_arg (arg2, REAL_TYPE))
8705 {
8706 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
8707 if (tem)
8708 return tem;
8709
8710 /* ??? Only expand to FMA_EXPR if it's directly supported. */
8711 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
8712 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
8713 }
8714 return NULL_TREE;
8715 }
8716
8717 /* Fold a call to builtin fmin or fmax. */
8718
8719 static tree
8720 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
8721 tree type, bool max)
8722 {
8723 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
8724 {
8725 /* Calculate the result when the argument is a constant. */
8726 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
8727
8728 if (res)
8729 return res;
8730
8731 /* If either argument is NaN, return the other one. Avoid the
8732 transformation if we get (and honor) a signalling NaN. Using
8733 omit_one_operand() ensures we create a non-lvalue. */
8734 if (TREE_CODE (arg0) == REAL_CST
8735 && real_isnan (&TREE_REAL_CST (arg0))
8736 && (! HONOR_SNANS (arg0)
8737 || ! TREE_REAL_CST (arg0).signalling))
8738 return omit_one_operand_loc (loc, type, arg1, arg0);
8739 if (TREE_CODE (arg1) == REAL_CST
8740 && real_isnan (&TREE_REAL_CST (arg1))
8741 && (! HONOR_SNANS (arg1)
8742 || ! TREE_REAL_CST (arg1).signalling))
8743 return omit_one_operand_loc (loc, type, arg0, arg1);
8744
8745 /* Transform fmin/fmax(x,x) -> x. */
8746 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8747 return omit_one_operand_loc (loc, type, arg0, arg1);
8748
8749 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
8750 functions to return the numeric arg if the other one is NaN.
8751 These tree codes don't honor that, so only transform if
8752 -ffinite-math-only is set. C99 doesn't require -0.0 to be
8753 handled, so we don't have to worry about it either. */
8754 if (flag_finite_math_only)
8755 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
8756 fold_convert_loc (loc, type, arg0),
8757 fold_convert_loc (loc, type, arg1));
8758 }
8759 return NULL_TREE;
8760 }
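
/* Illustrative sketch (not from the original sources) of the fmin
   folds above:

     fmin (__builtin_nan (""), x)  ->  x            (quiet NaN dropped)
     fmin (x, x)                   ->  x
     fmin (x, y)                   ->  MIN_EXPR <x, y>
                                       (-ffinite-math-only required)

   The MIN_EXPR form is gated on -ffinite-math-only because MIN_EXPR
   makes no promise about which operand is returned when one is a NaN,
   whereas C99 fmin must return the numeric operand.  */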
8761
8762 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8763
8764 static tree
8765 fold_builtin_carg (location_t loc, tree arg, tree type)
8766 {
8767 if (validate_arg (arg, COMPLEX_TYPE)
8768 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8769 {
8770 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
8771
8772 if (atan2_fn)
8773 {
8774 tree new_arg = builtin_save_expr (arg);
8775 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8776 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8777 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
8778 }
8779 }
8780
8781 return NULL_TREE;
8782 }
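
/* Worked example (illustrative only): the fold above produces

     carg (z)  ->  atan2 (__imag z, __real z)

   so carg (0.0 + 1.0i) becomes atan2 (1.0, 0.0), i.e. pi/2.  The
   builtin_save_expr call guards against evaluating ARG twice.  */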
8783
8784 /* Fold a call to builtin logb/ilogb. */
8785
8786 static tree
8787 fold_builtin_logb (location_t loc, tree arg, tree rettype)
8788 {
8789 if (! validate_arg (arg, REAL_TYPE))
8790 return NULL_TREE;
8791
8792 STRIP_NOPS (arg);
8793
8794 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
8795 {
8796 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
8797
8798 switch (value->cl)
8799 {
8800 case rvc_nan:
8801 case rvc_inf:
8802 /* If arg is Inf or NaN and we're logb, return it. */
8803 if (TREE_CODE (rettype) == REAL_TYPE)
8804 {
8805 /* For logb(-Inf) we have to return +Inf. */
8806 if (real_isinf (value) && real_isneg (value))
8807 {
8808 REAL_VALUE_TYPE tem;
8809 real_inf (&tem);
8810 return build_real (rettype, tem);
8811 }
8812 return fold_convert_loc (loc, rettype, arg);
8813 }
8814 /* Fall through... */
8815 case rvc_zero:
8816 /* Zero may set errno and/or raise an exception for logb; for
8817 ilogb we don't know FP_ILOGB0. */
8818 return NULL_TREE;
8819 case rvc_normal:
8820 /* For normal numbers, proceed iff radix == 2. In GCC,
8821 normalized significands are in the range [0.5, 1.0). We
8822 want the exponent as if they were [1.0, 2.0) so get the
8823 exponent and subtract 1. */
8824 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
8825 return fold_convert_loc (loc, rettype,
8826 build_int_cst (integer_type_node,
8827 REAL_EXP (value)-1));
8828 break;
8829 }
8830 }
8831
8832 return NULL_TREE;
8833 }
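
/* Worked example (illustrative only) of the exponent bias handled
   above: GCC stores 8.0 as 0.5 * 2**4, so REAL_EXP is 4, while
   logb (8.0), defined in terms of [1.0, 2.0) significands, must
   yield 3; hence the REAL_EXP (value) - 1 in the rvc_normal case.  */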
8834
8835 /* Fold a call to builtin significand, if radix == 2. */
8836
8837 static tree
8838 fold_builtin_significand (location_t loc, tree arg, tree rettype)
8839 {
8840 if (! validate_arg (arg, REAL_TYPE))
8841 return NULL_TREE;
8842
8843 STRIP_NOPS (arg);
8844
8845 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
8846 {
8847 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
8848
8849 switch (value->cl)
8850 {
8851 case rvc_zero:
8852 case rvc_nan:
8853 case rvc_inf:
8854 /* If arg is +-0, +-Inf or +-NaN, then return it. */
8855 return fold_convert_loc (loc, rettype, arg);
8856 case rvc_normal:
8857 /* For normal numbers, proceed iff radix == 2. */
8858 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
8859 {
8860 REAL_VALUE_TYPE result = *value;
8861 /* In GCC, normalized significands are in the range [0.5,
8862 1.0). We want them to be [1.0, 2.0) so set the
8863 exponent to 1. */
8864 SET_REAL_EXP (&result, 1);
8865 return build_real (rettype, result);
8866 }
8867 break;
8868 }
8869 }
8870
8871 return NULL_TREE;
8872 }
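
/* Worked example (illustrative only): GCC stores 12.0 as 0.75 * 2**4.
   Forcing the exponent to 1 via SET_REAL_EXP gives 0.75 * 2**1 = 1.5,
   which is indeed significand (12.0), since 12.0 == 1.5 * 2**3.  */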
8873
8874 /* Fold a call to builtin frexp; we can assume the base is 2. */
8875
8876 static tree
8877 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
8878 {
8879 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8880 return NULL_TREE;
8881
8882 STRIP_NOPS (arg0);
8883
8884 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8885 return NULL_TREE;
8886
8887 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8888
8889 /* Proceed if a valid pointer type was passed in. */
8890 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
8891 {
8892 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8893 tree frac, exp;
8894
8895 switch (value->cl)
8896 {
8897 case rvc_zero:
8898 /* For +-0, return (*exp = 0, +-0). */
8899 exp = integer_zero_node;
8900 frac = arg0;
8901 break;
8902 case rvc_nan:
8903 case rvc_inf:
8904 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
8905 return omit_one_operand_loc (loc, rettype, arg0, arg1);
8906 case rvc_normal:
8907 {
8908 /* Since the frexp function always expects base 2, and in
8909 GCC normalized significands are already in the range
8910 [0.5, 1.0), we have exactly what frexp wants. */
8911 REAL_VALUE_TYPE frac_rvt = *value;
8912 SET_REAL_EXP (&frac_rvt, 0);
8913 frac = build_real (rettype, frac_rvt);
8914 exp = build_int_cst (integer_type_node, REAL_EXP (value));
8915 }
8916 break;
8917 default:
8918 gcc_unreachable ();
8919 }
8920
8921 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
8922 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
8923 TREE_SIDE_EFFECTS (arg1) = 1;
8924 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
8925 }
8926
8927 return NULL_TREE;
8928 }
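
/* Worked example (illustrative only): for frexp (8.0, &e) the
   rvc_normal case above keeps the significand 0.5 (exponent forced
   to 0) and stores REAL_EXP, i.e. 4, through *e.  This matches
   8.0 == 0.5 * 2**4, with 0.5 in frexp's required [0.5, 1.0).  */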
8929
8930 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
8931 then we can assume the base is two. If it's false, then we have to
8932 check the mode of the TYPE parameter in certain cases. */
8933
8934 static tree
8935 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
8936 tree type, bool ldexp)
8937 {
8938 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
8939 {
8940 STRIP_NOPS (arg0);
8941 STRIP_NOPS (arg1);
8942
8943 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
8944 if (real_zerop (arg0) || integer_zerop (arg1)
8945 || (TREE_CODE (arg0) == REAL_CST
8946 && !real_isfinite (&TREE_REAL_CST (arg0))))
8947 return omit_one_operand_loc (loc, type, arg0, arg1);
8948
8949 /* If both arguments are constant, then try to evaluate it. */
8950 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
8951 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
8952 && tree_fits_shwi_p (arg1))
8953 {
8954 /* Bound the maximum adjustment to twice the range of the
8955 mode's valid exponents. Use labs to ensure the range is
8956 positive as a sanity check. */
8957 const long max_exp_adj = 2 *
8958 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
8959 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
8960
8961 /* Get the user-requested adjustment. */
8962 const HOST_WIDE_INT req_exp_adj = tree_to_shwi (arg1);
8963
8964 /* The requested adjustment must be inside this range. This
8965 is a preliminary cap to avoid things like overflow, we
8966 may still fail to compute the result for other reasons. */
8967 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
8968 {
8969 REAL_VALUE_TYPE initial_result;
8970
8971 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
8972
8973 /* Ensure we didn't overflow. */
8974 if (! real_isinf (&initial_result))
8975 {
8976 const REAL_VALUE_TYPE trunc_result
8977 = real_value_truncate (TYPE_MODE (type), initial_result);
8978
8979 /* Only proceed if the target mode can hold the
8980 resulting value. */
8981 if (real_equal (&initial_result, &trunc_result))
8982 return build_real (type, trunc_result);
8983 }
8984 }
8985 }
8986 }
8987
8988 return NULL_TREE;
8989 }
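
/* Worked example (illustrative only): for double (emax 1024,
   emin -1021) the cap above is max_exp_adj = 2 * (1024 - -1021)
   = 4090.  A call such as ldexp (1.0, 10) is therefore folded to
   1024.0, while something like ldexp (x, 1000000) is left for the
   runtime, which is responsible for errno on range errors.  */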
8990
8991 /* Fold a call to builtin modf. */
8992
8993 static tree
8994 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
8995 {
8996 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8997 return NULL_TREE;
8998
8999 STRIP_NOPS (arg0);
9000
9001 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9002 return NULL_TREE;
9003
9004 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9005
9006 /* Proceed if a valid pointer type was passed in. */
9007 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9008 {
9009 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9010 REAL_VALUE_TYPE trunc, frac;
9011
9012 switch (value->cl)
9013 {
9014 case rvc_nan:
9015 case rvc_zero:
9016 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9017 trunc = frac = *value;
9018 break;
9019 case rvc_inf:
9020 /* For +-Inf, return (*arg1 = arg0, +-0). */
9021 frac = dconst0;
9022 frac.sign = value->sign;
9023 trunc = *value;
9024 break;
9025 case rvc_normal:
9026 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9027 real_trunc (&trunc, VOIDmode, value);
9028 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9029 /* If the original number was negative and already
9030 integral, then the fractional part is -0.0. */
9031 if (value->sign && frac.cl == rvc_zero)
9032 frac.sign = value->sign;
9033 break;
9034 }
9035
9036 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9037 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9038 build_real (rettype, trunc));
9039 TREE_SIDE_EFFECTS (arg1) = 1;
9040 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9041 build_real (rettype, frac));
9042 }
9043
9044 return NULL_TREE;
9045 }
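
/* Worked examples (illustrative only): modf (-3.5, &i) folds to the
   pair *i = -3.0, result -0.5, and modf (-2.0, &i) to *i = -2.0,
   result -0.0; the sign copy in the rvc_normal case is what keeps
   the fractional part of an integral negative negatively signed.  */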
9046
9047 /* Given a location LOC, an interclass builtin function decl FNDECL
9048 and its single argument ARG, return a folded expression computing
9049 the same, or NULL_TREE if we either couldn't or didn't want to fold
9050 (the latter happens if there's an RTL instruction available). */
9051
9052 static tree
9053 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9054 {
9055 machine_mode mode;
9056
9057 if (!validate_arg (arg, REAL_TYPE))
9058 return NULL_TREE;
9059
9060 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9061 return NULL_TREE;
9062
9063 mode = TYPE_MODE (TREE_TYPE (arg));
9064
9065 /* If there is no optab, try generic code. */
9066 switch (DECL_FUNCTION_CODE (fndecl))
9067 {
9068 tree result;
9069
9070 CASE_FLT_FN (BUILT_IN_ISINF):
9071 {
9072 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9073 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9074 tree const type = TREE_TYPE (arg);
9075 REAL_VALUE_TYPE r;
9076 char buf[128];
9077
9078 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9079 real_from_string (&r, buf);
9080 result = build_call_expr (isgr_fn, 2,
9081 fold_build1_loc (loc, ABS_EXPR, type, arg),
9082 build_real (type, r));
9083 return result;
9084 }
9085 CASE_FLT_FN (BUILT_IN_FINITE):
9086 case BUILT_IN_ISFINITE:
9087 {
9088 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9089 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9090 tree const type = TREE_TYPE (arg);
9091 REAL_VALUE_TYPE r;
9092 char buf[128];
9093
9094 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9095 real_from_string (&r, buf);
9096 result = build_call_expr (isle_fn, 2,
9097 fold_build1_loc (loc, ABS_EXPR, type, arg),
9098 build_real (type, r));
9106 return result;
9107 }
9108 case BUILT_IN_ISNORMAL:
9109 {
9110 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9111 islessequal(fabs(x),DBL_MAX). */
9112 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9113 tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
9114 tree const type = TREE_TYPE (arg);
9115 REAL_VALUE_TYPE rmax, rmin;
9116 char buf[128];
9117
9118 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9119 real_from_string (&rmax, buf);
9120 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9121 real_from_string (&rmin, buf);
9122 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9123 result = build_call_expr (isle_fn, 2, arg,
9124 build_real (type, rmax));
9125 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9126 build_call_expr (isge_fn, 2, arg,
9127 build_real (type, rmin)));
9128 return result;
9129 }
9130 default:
9131 break;
9132 }
9133
9134 return NULL_TREE;
9135 }
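
/* Illustrative sketch (not from the original sources): when no
   target instruction is available, isfinite on a double argument X
   is folded along the lines of

     __builtin_islessequal (__builtin_fabs (x), DBL_MAX)

   where DBL_MAX stands for the maximum value get_max_float printed
   into BUF, and isnormal additionally requires
   fabs (x) >= 0x1p-1022, the smallest normal double.  The unordered
   comparisons keep NaNs classified correctly without raising
   spurious exceptions.  */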
9136
9137 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
9138 ARG is the argument for the call. */
9139
9140 static tree
9141 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9142 {
9143 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9144 REAL_VALUE_TYPE r;
9145
9146 if (!validate_arg (arg, REAL_TYPE))
9147 return NULL_TREE;
9148
9149 switch (builtin_index)
9150 {
9151 case BUILT_IN_ISINF:
9152 if (!HONOR_INFINITIES (arg))
9153 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9154
9155 if (TREE_CODE (arg) == REAL_CST)
9156 {
9157 r = TREE_REAL_CST (arg);
9158 if (real_isinf (&r))
9159 return real_compare (GT_EXPR, &r, &dconst0)
9160 ? integer_one_node : integer_minus_one_node;
9161 else
9162 return integer_zero_node;
9163 }
9164
9165 return NULL_TREE;
9166
9167 case BUILT_IN_ISINF_SIGN:
9168 {
9169 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9170 /* In a boolean context, GCC will fold the inner COND_EXPR to
9171 1. So e.g. "if (isinf_sign(x))" would be folded to just
9172 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9173 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9174 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
9175 tree tmp = NULL_TREE;
9176
9177 arg = builtin_save_expr (arg);
9178
9179 if (signbit_fn && isinf_fn)
9180 {
9181 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9182 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9183
9184 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9185 signbit_call, integer_zero_node);
9186 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9187 isinf_call, integer_zero_node);
9188
9189 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9190 integer_minus_one_node, integer_one_node);
9191 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9192 isinf_call, tmp,
9193 integer_zero_node);
9194 }
9195
9196 return tmp;
9197 }
9198
9199 case BUILT_IN_ISFINITE:
9200 if (!HONOR_NANS (arg)
9201 && !HONOR_INFINITIES (arg))
9202 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9203
9204 if (TREE_CODE (arg) == REAL_CST)
9205 {
9206 r = TREE_REAL_CST (arg);
9207 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9208 }
9209
9210 return NULL_TREE;
9211
9212 case BUILT_IN_ISNAN:
9213 if (!HONOR_NANS (arg))
9214 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9215
9216 if (TREE_CODE (arg) == REAL_CST)
9217 {
9218 r = TREE_REAL_CST (arg);
9219 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9220 }
9221
9222 arg = builtin_save_expr (arg);
9223 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9224
9225 default:
9226 gcc_unreachable ();
9227 }
9228 }
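
/* Illustrative sketch (not from the original sources): the constant
   folds above give, for example,

     __builtin_isinf (-__builtin_inf ())  ->  -1
     __builtin_isnan (x)                  ->  __builtin_isunordered (x, x)

   the latter only while NaNs are honored; under -ffinite-math-only
   isnan simply folds to 0.  */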
9229
9230 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9231 This builtin will generate code to return the appropriate floating
9232 point classification depending on the value of the floating point
9233 number passed in. The possible return values must be supplied as
9234 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9235 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
9236 one floating point argument, which is "type generic". */
9237
9238 static tree
9239 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
9240 {
9241 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9242 arg, type, res, tmp;
9243 machine_mode mode;
9244 REAL_VALUE_TYPE r;
9245 char buf[128];
9246
9247 /* Verify the required arguments in the original call. */
9248 if (nargs != 6
9249 || !validate_arg (args[0], INTEGER_TYPE)
9250 || !validate_arg (args[1], INTEGER_TYPE)
9251 || !validate_arg (args[2], INTEGER_TYPE)
9252 || !validate_arg (args[3], INTEGER_TYPE)
9253 || !validate_arg (args[4], INTEGER_TYPE)
9254 || !validate_arg (args[5], REAL_TYPE))
9255 return NULL_TREE;
9256
9257 fp_nan = args[0];
9258 fp_infinite = args[1];
9259 fp_normal = args[2];
9260 fp_subnormal = args[3];
9261 fp_zero = args[4];
9262 arg = args[5];
9263 type = TREE_TYPE (arg);
9264 mode = TYPE_MODE (type);
9265 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9266
9267 /* fpclassify(x) ->
9268 isnan(x) ? FP_NAN :
9269 (fabs(x) == Inf ? FP_INFINITE :
9270 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9271 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9272
9273 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9274 build_real (type, dconst0));
9275 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9276 tmp, fp_zero, fp_subnormal);
9277
9278 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9279 real_from_string (&r, buf);
9280 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9281 arg, build_real (type, r));
9282 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9283
9284 if (HONOR_INFINITIES (mode))
9285 {
9286 real_inf (&r);
9287 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9288 build_real (type, r));
9289 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9290 fp_infinite, res);
9291 }
9292
9293 if (HONOR_NANS (mode))
9294 {
9295 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9296 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9297 }
9298
9299 return res;
9300 }
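
/* Worked example (illustrative only): for a double argument, the
   DBL_MIN in the comment above is materialized as 0x1p-1022, so an
   input of 0x1p-1030 fails both the >= DBL_MIN and the == 0 tests
   and is classified FP_SUBNORMAL, while 1.0 passes >= DBL_MIN and
   yields FP_NORMAL.  */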
9301
9302 /* Fold a call to an unordered comparison function such as
9303 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9304 being called and ARG0 and ARG1 are the arguments for the call.
9305 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9306 the opposite of the desired result. UNORDERED_CODE is used
9307 for modes that can hold NaNs and ORDERED_CODE is used for
9308 the rest. */
9309
9310 static tree
9311 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9312 enum tree_code unordered_code,
9313 enum tree_code ordered_code)
9314 {
9315 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9316 enum tree_code code;
9317 tree type0, type1;
9318 enum tree_code code0, code1;
9319 tree cmp_type = NULL_TREE;
9320
9321 type0 = TREE_TYPE (arg0);
9322 type1 = TREE_TYPE (arg1);
9323
9324 code0 = TREE_CODE (type0);
9325 code1 = TREE_CODE (type1);
9326
9327 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9328 /* Choose the wider of two real types. */
9329 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9330 ? type0 : type1;
9331 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9332 cmp_type = type0;
9333 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9334 cmp_type = type1;
9335
9336 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9337 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9338
9339 if (unordered_code == UNORDERED_EXPR)
9340 {
9341 if (!HONOR_NANS (arg0))
9342 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9343 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9344 }
9345
9346 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
9347 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9348 fold_build2_loc (loc, code, type, arg0, arg1));
9349 }
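
/* Illustrative sketch (not from the original sources): when NaNs are
   honored, isgreater (x, y) is folded to

     TRUTH_NOT <UNLE_EXPR <x, y>>

   which is true exactly when both operands are ordered and x > y,
   and, unlike a plain GT_EXPR, does not raise an invalid-operation
   exception on quiet NaNs.  */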
9350
9351 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
9352 arithmetic if it can never overflow, or into internal functions that
9353 return both the result of the arithmetic and an overflowed boolean flag
9354 in a complex integer result, or some other check for overflow. */
9355
9356 static tree
9357 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
9358 tree arg0, tree arg1, tree arg2)
9359 {
9360 enum internal_fn ifn = IFN_LAST;
9361 tree type = TREE_TYPE (TREE_TYPE (arg2));
9362 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
9363 switch (fcode)
9364 {
9365 case BUILT_IN_ADD_OVERFLOW:
9366 case BUILT_IN_SADD_OVERFLOW:
9367 case BUILT_IN_SADDL_OVERFLOW:
9368 case BUILT_IN_SADDLL_OVERFLOW:
9369 case BUILT_IN_UADD_OVERFLOW:
9370 case BUILT_IN_UADDL_OVERFLOW:
9371 case BUILT_IN_UADDLL_OVERFLOW:
9372 ifn = IFN_ADD_OVERFLOW;
9373 break;
9374 case BUILT_IN_SUB_OVERFLOW:
9375 case BUILT_IN_SSUB_OVERFLOW:
9376 case BUILT_IN_SSUBL_OVERFLOW:
9377 case BUILT_IN_SSUBLL_OVERFLOW:
9378 case BUILT_IN_USUB_OVERFLOW:
9379 case BUILT_IN_USUBL_OVERFLOW:
9380 case BUILT_IN_USUBLL_OVERFLOW:
9381 ifn = IFN_SUB_OVERFLOW;
9382 break;
9383 case BUILT_IN_MUL_OVERFLOW:
9384 case BUILT_IN_SMUL_OVERFLOW:
9385 case BUILT_IN_SMULL_OVERFLOW:
9386 case BUILT_IN_SMULLL_OVERFLOW:
9387 case BUILT_IN_UMUL_OVERFLOW:
9388 case BUILT_IN_UMULL_OVERFLOW:
9389 case BUILT_IN_UMULLL_OVERFLOW:
9390 ifn = IFN_MUL_OVERFLOW;
9391 break;
9392 default:
9393 gcc_unreachable ();
9394 }
9395 tree ctype = build_complex_type (type);
9396 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
9397 2, arg0, arg1);
9398 tree tgt = save_expr (call);
9399 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
9400 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
9401 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
9402 tree store
9403 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
9404 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
9405 }
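
/* Illustrative sketch (not from the original sources): a call such
   as __builtin_add_overflow (a, b, &r) is rewritten roughly as

     tgt = .ADD_OVERFLOW (a, b);    complex-integer internal call
     r = REALPART_EXPR <tgt>;       the arithmetic result
     (bool) IMAGPART_EXPR <tgt>     the overflow flag, which is the
                                    value of the whole COMPOUND_EXPR

   letting the expander later pick an add-with-overflow instruction
   where the target has one.  */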
9406
9407 /* Fold a call to built-in function FNDECL with 0 arguments.
9408 This function returns NULL_TREE if no simplification was possible. */
9409
9410 static tree
9411 fold_builtin_0 (location_t loc, tree fndecl)
9412 {
9413 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9414 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9415 switch (fcode)
9416 {
9417 CASE_FLT_FN (BUILT_IN_INF):
9418 case BUILT_IN_INFD32:
9419 case BUILT_IN_INFD64:
9420 case BUILT_IN_INFD128:
9421 return fold_builtin_inf (loc, type, true);
9422
9423 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9424 return fold_builtin_inf (loc, type, false);
9425
9426 case BUILT_IN_CLASSIFY_TYPE:
9427 return fold_builtin_classify_type (NULL_TREE);
9428
9429 default:
9430 break;
9431 }
9432 return NULL_TREE;
9433 }
9434
9435 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9436 This function returns NULL_TREE if no simplification was possible. */
9437
9438 static tree
9439 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
9440 {
9441 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9442 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9443 switch (fcode)
9444 {
9445 case BUILT_IN_CONSTANT_P:
9446 {
9447 tree val = fold_builtin_constant_p (arg0);
9448
9449 /* Gimplification will pull the CALL_EXPR for the builtin out of
9450 an if condition. When not optimizing, we won't CSE it back.
9451 To avoid link-error regressions, return false now. */
9452 if (!val && !optimize)
9453 val = integer_zero_node;
9454
9455 return val;
9456 }
9457
9458 case BUILT_IN_CLASSIFY_TYPE:
9459 return fold_builtin_classify_type (arg0);
9460
9461 case BUILT_IN_STRLEN:
9462 return fold_builtin_strlen (loc, type, arg0);
9463
9464 CASE_FLT_FN (BUILT_IN_FABS):
9465 case BUILT_IN_FABSD32:
9466 case BUILT_IN_FABSD64:
9467 case BUILT_IN_FABSD128:
9468 return fold_builtin_fabs (loc, arg0, type);
9469
9470 case BUILT_IN_ABS:
9471 case BUILT_IN_LABS:
9472 case BUILT_IN_LLABS:
9473 case BUILT_IN_IMAXABS:
9474 return fold_builtin_abs (loc, arg0, type);
9475
9476 CASE_FLT_FN (BUILT_IN_CONJ):
9477 if (validate_arg (arg0, COMPLEX_TYPE)
9478 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9479 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9480 break;
9481
9482 CASE_FLT_FN (BUILT_IN_CREAL):
9483 if (validate_arg (arg0, COMPLEX_TYPE)
9484 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9485 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
9486 break;
9487
9488 CASE_FLT_FN (BUILT_IN_CIMAG):
9489 if (validate_arg (arg0, COMPLEX_TYPE)
9490 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9491 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9492 break;
9493
9494 CASE_FLT_FN (BUILT_IN_CCOS):
9495 if (validate_arg (arg0, COMPLEX_TYPE)
9496 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9497 return do_mpc_arg1 (arg0, type, mpc_cos);
9498 break;
9499
9500 CASE_FLT_FN (BUILT_IN_CCOSH):
9501 if (validate_arg (arg0, COMPLEX_TYPE)
9502 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9503 return do_mpc_arg1 (arg0, type, mpc_cosh);
9504 break;
9505
9506 CASE_FLT_FN (BUILT_IN_CPROJ):
9507 return fold_builtin_cproj (loc, arg0, type);
9508
9509 CASE_FLT_FN (BUILT_IN_CSIN):
9510 if (validate_arg (arg0, COMPLEX_TYPE)
9511 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9512 return do_mpc_arg1 (arg0, type, mpc_sin);
9513 break;
9514
9515 CASE_FLT_FN (BUILT_IN_CSINH):
9516 if (validate_arg (arg0, COMPLEX_TYPE)
9517 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9518 return do_mpc_arg1 (arg0, type, mpc_sinh);
9519 break;
9520
9521 CASE_FLT_FN (BUILT_IN_CTAN):
9522 if (validate_arg (arg0, COMPLEX_TYPE)
9523 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9524 return do_mpc_arg1 (arg0, type, mpc_tan);
9525 break;
9526
9527 CASE_FLT_FN (BUILT_IN_CTANH):
9528 if (validate_arg (arg0, COMPLEX_TYPE)
9529 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9530 return do_mpc_arg1 (arg0, type, mpc_tanh);
9531 break;
9532
9533 CASE_FLT_FN (BUILT_IN_CLOG):
9534 if (validate_arg (arg0, COMPLEX_TYPE)
9535 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9536 return do_mpc_arg1 (arg0, type, mpc_log);
9537 break;
9538
9539 CASE_FLT_FN (BUILT_IN_CSQRT):
9540 if (validate_arg (arg0, COMPLEX_TYPE)
9541 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9542 return do_mpc_arg1 (arg0, type, mpc_sqrt);
9543 break;
9544
9545 CASE_FLT_FN (BUILT_IN_CASIN):
9546 if (validate_arg (arg0, COMPLEX_TYPE)
9547 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9548 return do_mpc_arg1 (arg0, type, mpc_asin);
9549 break;
9550
9551 CASE_FLT_FN (BUILT_IN_CACOS):
9552 if (validate_arg (arg0, COMPLEX_TYPE)
9553 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9554 return do_mpc_arg1 (arg0, type, mpc_acos);
9555 break;
9556
9557 CASE_FLT_FN (BUILT_IN_CATAN):
9558 if (validate_arg (arg0, COMPLEX_TYPE)
9559 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9560 return do_mpc_arg1 (arg0, type, mpc_atan);
9561 break;
9562
9563 CASE_FLT_FN (BUILT_IN_CASINH):
9564 if (validate_arg (arg0, COMPLEX_TYPE)
9565 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9566 return do_mpc_arg1 (arg0, type, mpc_asinh);
9567 break;
9568
9569 CASE_FLT_FN (BUILT_IN_CACOSH):
9570 if (validate_arg (arg0, COMPLEX_TYPE)
9571 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9572 return do_mpc_arg1 (arg0, type, mpc_acosh);
9573 break;
9574
9575 CASE_FLT_FN (BUILT_IN_CATANH):
9576 if (validate_arg (arg0, COMPLEX_TYPE)
9577 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9578 return do_mpc_arg1 (arg0, type, mpc_atanh);
9579 break;
9580
9581 CASE_FLT_FN (BUILT_IN_CABS):
9582 if (TREE_CODE (arg0) == COMPLEX_CST
9583 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9584 return do_mpfr_arg2 (TREE_REALPART (arg0), TREE_IMAGPART (arg0),
9585 type, mpfr_hypot);
9586 break;
9587
9588 CASE_FLT_FN (BUILT_IN_CARG):
9589 return fold_builtin_carg (loc, arg0, type);
9590
9591 CASE_FLT_FN (BUILT_IN_SQRT):
9592 if (validate_arg (arg0, REAL_TYPE))
9593 return do_mpfr_arg1 (arg0, type, mpfr_sqrt, &dconst0, NULL, true);
9594 break;
9595
9596 CASE_FLT_FN (BUILT_IN_CBRT):
9597 if (validate_arg (arg0, REAL_TYPE))
9598 return do_mpfr_arg1 (arg0, type, mpfr_cbrt, NULL, NULL, 0);
9599 break;
9600
9601 CASE_FLT_FN (BUILT_IN_ASIN):
9602 if (validate_arg (arg0, REAL_TYPE))
9603 return do_mpfr_arg1 (arg0, type, mpfr_asin,
9604 &dconstm1, &dconst1, true);
9605 break;
9606
9607 CASE_FLT_FN (BUILT_IN_ACOS):
9608 if (validate_arg (arg0, REAL_TYPE))
9609 return do_mpfr_arg1 (arg0, type, mpfr_acos,
9610 &dconstm1, &dconst1, true);
9611 break;
9612
9613 CASE_FLT_FN (BUILT_IN_ATAN):
9614 if (validate_arg (arg0, REAL_TYPE))
9615 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
9616 break;
9617
9618 CASE_FLT_FN (BUILT_IN_ASINH):
9619 if (validate_arg (arg0, REAL_TYPE))
9620 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
9621 break;
9622
9623 CASE_FLT_FN (BUILT_IN_ACOSH):
9624 if (validate_arg (arg0, REAL_TYPE))
9625 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
9626 &dconst1, NULL, true);
9627 break;
9628
9629 CASE_FLT_FN (BUILT_IN_ATANH):
9630 if (validate_arg (arg0, REAL_TYPE))
9631 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
9632 &dconstm1, &dconst1, false);
9633 break;
9634
9635 CASE_FLT_FN (BUILT_IN_SIN):
9636 if (validate_arg (arg0, REAL_TYPE))
9637 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
9638 break;
9639
9640 CASE_FLT_FN (BUILT_IN_COS):
9641 if (validate_arg (arg0, REAL_TYPE))
9642 return do_mpfr_arg1 (arg0, type, mpfr_cos, NULL, NULL, 0);
9643 break;
9644
9645 CASE_FLT_FN (BUILT_IN_TAN):
9646 return fold_builtin_tan (arg0, type);
9647
9648 CASE_FLT_FN (BUILT_IN_CEXP):
9649 return fold_builtin_cexp (loc, arg0, type);
9650
9651 CASE_FLT_FN (BUILT_IN_CEXPI):
9652 if (validate_arg (arg0, REAL_TYPE))
9653 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
9654 break;
9655
9656 CASE_FLT_FN (BUILT_IN_SINH):
9657 if (validate_arg (arg0, REAL_TYPE))
9658 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
9659 break;
9660
9661 CASE_FLT_FN (BUILT_IN_COSH):
9662 if (validate_arg (arg0, REAL_TYPE))
9663 return do_mpfr_arg1 (arg0, type, mpfr_cosh, NULL, NULL, 0);
9664 break;
9665
9666 CASE_FLT_FN (BUILT_IN_TANH):
9667 if (validate_arg (arg0, REAL_TYPE))
9668 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
9669 break;
9670
9671 CASE_FLT_FN (BUILT_IN_ERF):
9672 if (validate_arg (arg0, REAL_TYPE))
9673 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
9674 break;
9675
9676 CASE_FLT_FN (BUILT_IN_ERFC):
9677 if (validate_arg (arg0, REAL_TYPE))
9678 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
9679 break;
9680
9681 CASE_FLT_FN (BUILT_IN_TGAMMA):
9682 if (validate_arg (arg0, REAL_TYPE))
9683 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
9684 break;
9685
9686 CASE_FLT_FN (BUILT_IN_EXP):
9687 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
9688
9689 CASE_FLT_FN (BUILT_IN_EXP2):
9690 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
9691
9692 CASE_FLT_FN (BUILT_IN_EXP10):
9693 CASE_FLT_FN (BUILT_IN_POW10):
9694 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
9695
9696 CASE_FLT_FN (BUILT_IN_EXPM1):
9697 if (validate_arg (arg0, REAL_TYPE))
9698 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
9699 break;
9700
9701 CASE_FLT_FN (BUILT_IN_LOG):
9702 if (validate_arg (arg0, REAL_TYPE))
9703 return do_mpfr_arg1 (arg0, type, mpfr_log, &dconst0, NULL, false);
9704 break;
9705
9706 CASE_FLT_FN (BUILT_IN_LOG2):
9707 if (validate_arg (arg0, REAL_TYPE))
9708 return do_mpfr_arg1 (arg0, type, mpfr_log2, &dconst0, NULL, false);
9709 break;
9710
9711 CASE_FLT_FN (BUILT_IN_LOG10):
9712 if (validate_arg (arg0, REAL_TYPE))
9713 return do_mpfr_arg1 (arg0, type, mpfr_log10, &dconst0, NULL, false);
9714 break;
9715
9716 CASE_FLT_FN (BUILT_IN_LOG1P):
9717 if (validate_arg (arg0, REAL_TYPE))
9718 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
9719 &dconstm1, NULL, false);
9720 break;
9721
9722 CASE_FLT_FN (BUILT_IN_J0):
9723 if (validate_arg (arg0, REAL_TYPE))
9724 return do_mpfr_arg1 (arg0, type, mpfr_j0,
9725 NULL, NULL, 0);
9726 break;
9727
9728 CASE_FLT_FN (BUILT_IN_J1):
9729 if (validate_arg (arg0, REAL_TYPE))
9730 return do_mpfr_arg1 (arg0, type, mpfr_j1,
9731 NULL, NULL, 0);
9732 break;
9733
9734 CASE_FLT_FN (BUILT_IN_Y0):
9735 if (validate_arg (arg0, REAL_TYPE))
9736 return do_mpfr_arg1 (arg0, type, mpfr_y0,
9737 &dconst0, NULL, false);
9738 break;
9739
9740 CASE_FLT_FN (BUILT_IN_Y1):
9741 if (validate_arg (arg0, REAL_TYPE))
9742 return do_mpfr_arg1 (arg0, type, mpfr_y1,
9743 &dconst0, NULL, false);
9744 break;
9745
9746 CASE_FLT_FN (BUILT_IN_NAN):
9747 case BUILT_IN_NAND32:
9748 case BUILT_IN_NAND64:
9749 case BUILT_IN_NAND128:
9750 return fold_builtin_nan (arg0, type, true);
9751
9752 CASE_FLT_FN (BUILT_IN_NANS):
9753 return fold_builtin_nan (arg0, type, false);
9754
9755 CASE_FLT_FN (BUILT_IN_FLOOR):
9756 return fold_builtin_floor (loc, fndecl, arg0);
9757
9758 CASE_FLT_FN (BUILT_IN_CEIL):
9759 return fold_builtin_ceil (loc, fndecl, arg0);
9760
9761 CASE_FLT_FN (BUILT_IN_TRUNC):
9762 return fold_builtin_trunc (loc, fndecl, arg0);
9763
9764 CASE_FLT_FN (BUILT_IN_ROUND):
9765 return fold_builtin_round (loc, fndecl, arg0);
9766
9767 CASE_FLT_FN (BUILT_IN_NEARBYINT):
9768 CASE_FLT_FN (BUILT_IN_RINT):
9769 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
9770
9771 CASE_FLT_FN (BUILT_IN_ICEIL):
9772 CASE_FLT_FN (BUILT_IN_LCEIL):
9773 CASE_FLT_FN (BUILT_IN_LLCEIL):
9774 CASE_FLT_FN (BUILT_IN_LFLOOR):
9775 CASE_FLT_FN (BUILT_IN_IFLOOR):
9776 CASE_FLT_FN (BUILT_IN_LLFLOOR):
9777 CASE_FLT_FN (BUILT_IN_IROUND):
9778 CASE_FLT_FN (BUILT_IN_LROUND):
9779 CASE_FLT_FN (BUILT_IN_LLROUND):
9780 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
9781
9782 CASE_FLT_FN (BUILT_IN_IRINT):
9783 CASE_FLT_FN (BUILT_IN_LRINT):
9784 CASE_FLT_FN (BUILT_IN_LLRINT):
9785 return fold_fixed_mathfn (loc, fndecl, arg0);
9786
9787 case BUILT_IN_BSWAP16:
9788 case BUILT_IN_BSWAP32:
9789 case BUILT_IN_BSWAP64:
9790 return fold_builtin_bswap (fndecl, arg0);
9791
9792 CASE_INT_FN (BUILT_IN_FFS):
9793 CASE_INT_FN (BUILT_IN_CLZ):
9794 CASE_INT_FN (BUILT_IN_CTZ):
9795 CASE_INT_FN (BUILT_IN_CLRSB):
9796 CASE_INT_FN (BUILT_IN_POPCOUNT):
9797 CASE_INT_FN (BUILT_IN_PARITY):
9798 return fold_builtin_bitop (fndecl, arg0);
9799
9800 CASE_FLT_FN (BUILT_IN_SIGNBIT):
9801 return fold_builtin_signbit (loc, arg0, type);
9802
9803 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
9804 return fold_builtin_significand (loc, arg0, type);
9805
9806 CASE_FLT_FN (BUILT_IN_ILOGB):
9807 CASE_FLT_FN (BUILT_IN_LOGB):
9808 return fold_builtin_logb (loc, arg0, type);
9809
9810 case BUILT_IN_ISASCII:
9811 return fold_builtin_isascii (loc, arg0);
9812
9813 case BUILT_IN_TOASCII:
9814 return fold_builtin_toascii (loc, arg0);
9815
9816 case BUILT_IN_ISDIGIT:
9817 return fold_builtin_isdigit (loc, arg0);
9818
9819 CASE_FLT_FN (BUILT_IN_FINITE):
9820 case BUILT_IN_FINITED32:
9821 case BUILT_IN_FINITED64:
9822 case BUILT_IN_FINITED128:
9823 case BUILT_IN_ISFINITE:
9824 {
9825 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
9826 if (ret)
9827 return ret;
9828 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9829 }
9830
9831 CASE_FLT_FN (BUILT_IN_ISINF):
9832 case BUILT_IN_ISINFD32:
9833 case BUILT_IN_ISINFD64:
9834 case BUILT_IN_ISINFD128:
9835 {
9836 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
9837 if (ret)
9838 return ret;
9839 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9840 }
9841
9842 case BUILT_IN_ISNORMAL:
9843 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9844
9845 case BUILT_IN_ISINF_SIGN:
9846 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
9847
9848 CASE_FLT_FN (BUILT_IN_ISNAN):
9849 case BUILT_IN_ISNAND32:
9850 case BUILT_IN_ISNAND64:
9851 case BUILT_IN_ISNAND128:
9852 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
9853
9854 case BUILT_IN_FREE:
9855 if (integer_zerop (arg0))
9856 return build_empty_stmt (loc);
9857 break;
9858
9859 default:
9860 break;
9861 }
9862
9863 return NULL_TREE;
9865 }
9866
9867 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
9868 This function returns NULL_TREE if no simplification was possible. */
9869
9870 static tree
9871 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
9872 {
9873 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9874 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9875
9876 switch (fcode)
9877 {
9878 CASE_FLT_FN (BUILT_IN_JN):
9879 if (validate_arg (arg0, INTEGER_TYPE)
9880 && validate_arg (arg1, REAL_TYPE))
9881 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
9882 break;
9883
9884 CASE_FLT_FN (BUILT_IN_YN):
9885 if (validate_arg (arg0, INTEGER_TYPE)
9886 && validate_arg (arg1, REAL_TYPE))
9887 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
9888 &dconst0, false);
9889 break;
9890
9891 CASE_FLT_FN (BUILT_IN_DREM):
9892 CASE_FLT_FN (BUILT_IN_REMAINDER):
9893 if (validate_arg (arg0, REAL_TYPE)
9894 && validate_arg (arg1, REAL_TYPE))
9895 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
9896 break;
9897
9898 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
9899 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
9900 if (validate_arg (arg0, REAL_TYPE)
9901 && validate_arg (arg1, POINTER_TYPE))
9902 return do_mpfr_lgamma_r (arg0, arg1, type);
9903 break;
9904
9905 CASE_FLT_FN (BUILT_IN_ATAN2):
9906 if (validate_arg (arg0, REAL_TYPE)
9907 && validate_arg (arg1, REAL_TYPE))
9908 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
9909 break;
9910
9911 CASE_FLT_FN (BUILT_IN_FDIM):
9912 if (validate_arg (arg0, REAL_TYPE)
9913 && validate_arg (arg1, REAL_TYPE))
9914 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
9915 break;
9916
9917 CASE_FLT_FN (BUILT_IN_HYPOT):
9918 return fold_builtin_hypot (loc, arg0, arg1, type);
9919
9920 CASE_FLT_FN (BUILT_IN_CPOW):
9921 if (validate_arg (arg0, COMPLEX_TYPE)
9922 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
9923 && validate_arg (arg1, COMPLEX_TYPE)
9924 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
9925 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
9926 break;
9927
9928 CASE_FLT_FN (BUILT_IN_LDEXP):
9929 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
9930 CASE_FLT_FN (BUILT_IN_SCALBN):
9931 CASE_FLT_FN (BUILT_IN_SCALBLN):
9932 return fold_builtin_load_exponent (loc, arg0, arg1,
9933 type, /*ldexp=*/false);
9934
9935 CASE_FLT_FN (BUILT_IN_FREXP):
9936 return fold_builtin_frexp (loc, arg0, arg1, type);
9937
9938 CASE_FLT_FN (BUILT_IN_MODF):
9939 return fold_builtin_modf (loc, arg0, arg1, type);
9940
9941 case BUILT_IN_STRSTR:
9942 return fold_builtin_strstr (loc, arg0, arg1, type);
9943
9944 case BUILT_IN_STRSPN:
9945 return fold_builtin_strspn (loc, arg0, arg1);
9946
9947 case BUILT_IN_STRCSPN:
9948 return fold_builtin_strcspn (loc, arg0, arg1);
9949
9950 case BUILT_IN_STRCHR:
9951 case BUILT_IN_INDEX:
9952 return fold_builtin_strchr (loc, arg0, arg1, type);
9953
9954 case BUILT_IN_STRRCHR:
9955 case BUILT_IN_RINDEX:
9956 return fold_builtin_strrchr (loc, arg0, arg1, type);
9957
9958 case BUILT_IN_STRCMP:
9959 return fold_builtin_strcmp (loc, arg0, arg1);
9960
9961 case BUILT_IN_STRPBRK:
9962 return fold_builtin_strpbrk (loc, arg0, arg1, type);
9963
9964 case BUILT_IN_EXPECT:
9965 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
9966
9967 CASE_FLT_FN (BUILT_IN_POW):
9968 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
9969
9970 CASE_FLT_FN (BUILT_IN_POWI):
9971 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
9972
9973 CASE_FLT_FN (BUILT_IN_COPYSIGN):
9974 return fold_builtin_copysign (loc, arg0, arg1, type);
9975
9976 CASE_FLT_FN (BUILT_IN_FMIN):
9977 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
9978
9979 CASE_FLT_FN (BUILT_IN_FMAX):
9980 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
9981
9982 case BUILT_IN_ISGREATER:
9983 return fold_builtin_unordered_cmp (loc, fndecl,
9984 arg0, arg1, UNLE_EXPR, LE_EXPR);
9985 case BUILT_IN_ISGREATEREQUAL:
9986 return fold_builtin_unordered_cmp (loc, fndecl,
9987 arg0, arg1, UNLT_EXPR, LT_EXPR);
9988 case BUILT_IN_ISLESS:
9989 return fold_builtin_unordered_cmp (loc, fndecl,
9990 arg0, arg1, UNGE_EXPR, GE_EXPR);
9991 case BUILT_IN_ISLESSEQUAL:
9992 return fold_builtin_unordered_cmp (loc, fndecl,
9993 arg0, arg1, UNGT_EXPR, GT_EXPR);
9994 case BUILT_IN_ISLESSGREATER:
9995 return fold_builtin_unordered_cmp (loc, fndecl,
9996 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
9997 case BUILT_IN_ISUNORDERED:
9998 return fold_builtin_unordered_cmp (loc, fndecl,
9999 arg0, arg1, UNORDERED_EXPR,
10000 NOP_EXPR);
10001
10002 /* We do the folding for va_start in the expander. */
10003 case BUILT_IN_VA_START:
10004 break;
10005
10006 case BUILT_IN_OBJECT_SIZE:
10007 return fold_builtin_object_size (arg0, arg1);
10008
10009 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10010 return fold_builtin_atomic_always_lock_free (arg0, arg1);
10011
10012 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10013 return fold_builtin_atomic_is_lock_free (arg0, arg1);
10014
10015 default:
10016 break;
10017 }
10018 return NULL_TREE;
10019 }
10020
10021 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10022 and ARG2.
10023 This function returns NULL_TREE if no simplification was possible. */
10024
10025 static tree
10026 fold_builtin_3 (location_t loc, tree fndecl,
10027 tree arg0, tree arg1, tree arg2)
10028 {
10029 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10030 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10031 switch (fcode)
10032 {
10034 CASE_FLT_FN (BUILT_IN_SINCOS):
10035 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10036
10037 CASE_FLT_FN (BUILT_IN_FMA):
10038 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
10040
10041 CASE_FLT_FN (BUILT_IN_REMQUO):
10042 if (validate_arg (arg0, REAL_TYPE)
10043 && validate_arg (arg1, REAL_TYPE)
10044 && validate_arg (arg2, POINTER_TYPE))
10045 return do_mpfr_remquo (arg0, arg1, arg2);
10046 break;
10047
10048 case BUILT_IN_STRNCMP:
10049 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10050
10051 case BUILT_IN_MEMCHR:
10052 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10053
10054 case BUILT_IN_BCMP:
10055 case BUILT_IN_MEMCMP:
10056 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
10057
10058 case BUILT_IN_EXPECT:
10059 return fold_builtin_expect (loc, arg0, arg1, arg2);
10060
10061 case BUILT_IN_ADD_OVERFLOW:
10062 case BUILT_IN_SUB_OVERFLOW:
10063 case BUILT_IN_MUL_OVERFLOW:
10064 case BUILT_IN_SADD_OVERFLOW:
10065 case BUILT_IN_SADDL_OVERFLOW:
10066 case BUILT_IN_SADDLL_OVERFLOW:
10067 case BUILT_IN_SSUB_OVERFLOW:
10068 case BUILT_IN_SSUBL_OVERFLOW:
10069 case BUILT_IN_SSUBLL_OVERFLOW:
10070 case BUILT_IN_SMUL_OVERFLOW:
10071 case BUILT_IN_SMULL_OVERFLOW:
10072 case BUILT_IN_SMULLL_OVERFLOW:
10073 case BUILT_IN_UADD_OVERFLOW:
10074 case BUILT_IN_UADDL_OVERFLOW:
10075 case BUILT_IN_UADDLL_OVERFLOW:
10076 case BUILT_IN_USUB_OVERFLOW:
10077 case BUILT_IN_USUBL_OVERFLOW:
10078 case BUILT_IN_USUBLL_OVERFLOW:
10079 case BUILT_IN_UMUL_OVERFLOW:
10080 case BUILT_IN_UMULL_OVERFLOW:
10081 case BUILT_IN_UMULLL_OVERFLOW:
10082 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
10083
10084 default:
10085 break;
10086 }
10087 return NULL_TREE;
10088 }
10089
10090 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10091 arguments. The trailing unnamed bool parameter (historically IGNORE,
10092 true if the result of the call is ignored) is currently unused. This
10093 function returns NULL_TREE if no simplification was possible. */
10094
10095 tree
10096 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
10097 {
10098 tree ret = NULL_TREE;
10099
10100 switch (nargs)
10101 {
10102 case 0:
10103 ret = fold_builtin_0 (loc, fndecl);
10104 break;
10105 case 1:
10106 ret = fold_builtin_1 (loc, fndecl, args[0]);
10107 break;
10108 case 2:
10109 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
10110 break;
10111 case 3:
10112 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
10113 break;
10114 default:
10115 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
10116 break;
10117 }
10118 if (ret)
10119 {
10120 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10121 SET_EXPR_LOCATION (ret, loc);
10122 TREE_NO_WARNING (ret) = 1;
10123 return ret;
10124 }
10125 return NULL_TREE;
10126 }
10127
10128 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10129 list ARGS along with N new arguments in NEWARGS. SKIP is the number
10130 of arguments in ARGS to be omitted. OLDNARGS is the number of
10131 elements in ARGS. */
10132
10133 static tree
10134 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
10135 int skip, tree fndecl, int n, va_list newargs)
10136 {
10137 int nargs = oldnargs - skip + n;
10138 tree *buffer;
10139
10140 if (n > 0)
10141 {
10142 int i, j;
10143
10144 buffer = XALLOCAVEC (tree, nargs);
10145 for (i = 0; i < n; i++)
10146 buffer[i] = va_arg (newargs, tree);
10147 for (j = skip; j < oldnargs; j++, i++)
10148 buffer[i] = args[j];
10149 }
10150 else
10151 buffer = args + skip;
10152
10153 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
10154 }
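
/* Worked example (illustrative only): with OLDNARGS == 3, SKIP == 1
   and N == 2, a call foo (a, b, c) is rebuilt as FNDECL (x, y, b, c),
   where x and y stand for the two values supplied in NEWARGS: the N
   new arguments come first, then the old arguments minus the first
   SKIP are appended.  */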
10155
10156 /* Return true if FNDECL shouldn't be folded right now.
10157 If a built-in function has an inline attribute always_inline
10158 wrapper, defer folding it until after always_inline functions have
10159 been inlined; otherwise e.g. -D_FORTIFY_SOURCE checking
10160 might not be performed. */
10161
10162 bool
10163 avoid_folding_inline_builtin (tree fndecl)
10164 {
10165 return (DECL_DECLARED_INLINE_P (fndecl)
10166 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10167 && cfun
10168 && !cfun->always_inline_functions_inlined
10169 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10170 }
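
/* Illustrative example (not from the original sources): with
   -D_FORTIFY_SOURCE=2, glibc wraps memcpy in an always_inline
   function that calls __builtin___memcpy_chk.  Folding the builtin
   before that wrapper has been inlined would bypass the object-size
   check, which is why folding is deferred here.  */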
10171
10172 /* A wrapper function for builtin folding that prevents warnings for
10173 "statement without effect" and the like, caused by removing the
10174 call node before the warning is generated. */
10175
10176 tree
10177 fold_call_expr (location_t loc, tree exp, bool ignore)
10178 {
10179 tree ret = NULL_TREE;
10180 tree fndecl = get_callee_fndecl (exp);
10181 if (fndecl
10182 && TREE_CODE (fndecl) == FUNCTION_DECL
10183 && DECL_BUILT_IN (fndecl)
10184 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10185 yet. Defer folding until we see all the arguments
10186 (after inlining). */
10187 && !CALL_EXPR_VA_ARG_PACK (exp))
10188 {
10189 int nargs = call_expr_nargs (exp);
10190
10191 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10192 instead the last argument is __builtin_va_arg_pack (). Defer folding
10193 even in that case, until arguments are finalized. */
10194 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10195 {
10196 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10197 if (fndecl2
10198 && TREE_CODE (fndecl2) == FUNCTION_DECL
10199 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10200 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10201 return NULL_TREE;
10202 }
10203
10204 if (avoid_folding_inline_builtin (fndecl))
10205 return NULL_TREE;
10206
10207 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10208 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
10209 CALL_EXPR_ARGP (exp), ignore);
10210 else
10211 {
10212 tree *args = CALL_EXPR_ARGP (exp);
10213 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10214 if (ret)
10215 return ret;
10216 }
10217 }
10218 return NULL_TREE;
10219 }
10220
10221 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
10222 N arguments are passed in the array ARGARRAY. Return a folded
10223 expression or NULL_TREE if no simplification was possible. */
10224
10225 tree
10226 fold_builtin_call_array (location_t loc, tree,
10227 tree fn,
10228 int n,
10229 tree *argarray)
10230 {
10231 if (TREE_CODE (fn) != ADDR_EXPR)
10232 return NULL_TREE;
10233
10234 tree fndecl = TREE_OPERAND (fn, 0);
10235 if (TREE_CODE (fndecl) == FUNCTION_DECL
10236 && DECL_BUILT_IN (fndecl))
10237 {
10238 /* If last argument is __builtin_va_arg_pack (), arguments to this
10239 function are not finalized yet. Defer folding until they are. */
10240 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10241 {
10242 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10243 if (fndecl2
10244 && TREE_CODE (fndecl2) == FUNCTION_DECL
10245 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10246 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10247 return NULL_TREE;
10248 }
10249 if (avoid_folding_inline_builtin (fndecl))
10250 return NULL_TREE;
10251 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10252 return targetm.fold_builtin (fndecl, n, argarray, false);
10253 else
10254 return fold_builtin_n (loc, fndecl, argarray, n, false);
10255 }
10256
10257 return NULL_TREE;
10258 }
10259
10260 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10261 along with N new arguments specified as the "..." parameters. SKIP
10262 is the number of arguments in EXP to be omitted. This function is used
10263 to do varargs-to-varargs transformations. */
10264
10265 static tree
10266 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10267 {
10268 va_list ap;
10269 tree t;
10270
10271 va_start (ap, n);
10272 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
10273 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
10274 va_end (ap);
10275
10276 return t;
10277 }
10278
10279 /* Validate a single argument ARG against a tree code CODE representing
10280 a type. */
10281
10282 static bool
10283 validate_arg (const_tree arg, enum tree_code code)
10284 {
10285 if (!arg)
10286 return false;
10287 else if (code == POINTER_TYPE)
10288 return POINTER_TYPE_P (TREE_TYPE (arg));
10289 else if (code == INTEGER_TYPE)
10290 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10291 return code == TREE_CODE (TREE_TYPE (arg));
10292 }
10293
10294 /* This function validates the types of a function call argument list
10295 against a specified list of tree_codes. If the last specifier is a 0,
10296 that represents an ellipsis; otherwise the last specifier must be a
10297 VOID_TYPE.
10298
10299 This is the GIMPLE version of validate_arglist. Eventually we want to
10300 completely convert builtins.c to work from GIMPLEs and the tree based
10301 validate_arglist will then be removed. */
10302
10303 bool
10304 validate_gimple_arglist (const gcall *call, ...)
10305 {
10306 enum tree_code code;
10307 bool res = false;
10308 va_list ap;
10309 const_tree arg;
10310 size_t i;
10311
10312 va_start (ap, call);
10313 i = 0;
10314
10315 do
10316 {
10317 code = (enum tree_code) va_arg (ap, int);
10318 switch (code)
10319 {
10320 case 0:
10321 /* This signifies an ellipsis; any further arguments are all OK. */
10322 res = true;
10323 goto end;
10324 case VOID_TYPE:
10325 /* This signifies an endlink; if no arguments remain, return
10326 true, otherwise return false. */
10327 res = (i == gimple_call_num_args (call));
10328 goto end;
10329 default:
10330 /* If no parameters remain or the parameter's code does not
10331 match the specified code, return false. Otherwise continue
10332 checking any remaining arguments. */
10333 arg = gimple_call_arg (call, i++);
10334 if (!validate_arg (arg, code))
10335 goto end;
10336 break;
10337 }
10338 }
10339 while (1);
10340
10341 /* We need gotos here since we can only call va_end once in a
10342 function. */
10343 end: ;
10344 va_end (ap);
10345
10346 return res;
10347 }
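
/* Usage sketch (illustrative only): a caller checking a frexp-like
   call would write

     if (validate_gimple_arglist (call, REAL_TYPE, POINTER_TYPE,
                                  VOID_TYPE))
       ... the call has exactly one real and one pointer argument ...

   Ending the specifier list with 0 instead of VOID_TYPE would allow
   arbitrary trailing arguments.  */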
10348
10349 /* Default target-specific builtin expander that does nothing. */
10350
10351 rtx
10352 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10353 rtx target ATTRIBUTE_UNUSED,
10354 rtx subtarget ATTRIBUTE_UNUSED,
10355 machine_mode mode ATTRIBUTE_UNUSED,
10356 int ignore ATTRIBUTE_UNUSED)
10357 {
10358 return NULL_RTX;
10359 }
10360
10361 /* Returns true if EXP represents data that would potentially reside
10362 in a readonly section. */
10363
10364 bool
10365 readonly_data_expr (tree exp)
10366 {
10367 STRIP_NOPS (exp);
10368
10369 if (TREE_CODE (exp) != ADDR_EXPR)
10370 return false;
10371
10372 exp = get_base_address (TREE_OPERAND (exp, 0));
10373 if (!exp)
10374 return false;
10375
10376 /* Make sure we call decl_readonly_section only for trees it
10377 can handle (since it returns true for everything it doesn't
10378 understand). */
10379 if (TREE_CODE (exp) == STRING_CST
10380 || TREE_CODE (exp) == CONSTRUCTOR
10381 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10382 return decl_readonly_section (exp, 0);
10383 else
10384 return false;
10385 }
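
/* Illustrative examples (not from the original sources): the address
   of a string literal, &"abc"[0], is an ADDR_EXPR whose base is a
   STRING_CST and is typically reported as read-only here, while the
   address of a local variable is not.  */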
10386
10387 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10388 to the call, and TYPE is its return type.
10389
10390 Return NULL_TREE if no simplification was possible, otherwise return the
10391 simplified form of the call as a tree.
10392
10393 The simplified form may be a constant or other expression which
10394 computes the same value, but in a more efficient manner (including
10395 calls to other builtin functions).
10396
10397 The call may contain arguments which need to be evaluated, but
10398 which are not useful to determine the result of the call. In
10399 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10400 COMPOUND_EXPR will be an argument which must be evaluated.
10401 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10402 COMPOUND_EXPR in the chain will contain the tree for the simplified
10403 form of the builtin function call. */
10404
10405 static tree
10406 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
10407 {
10408 if (!validate_arg (s1, POINTER_TYPE)
10409 || !validate_arg (s2, POINTER_TYPE))
10410 return NULL_TREE;
10411 else
10412 {
10413 tree fn;
10414 const char *p1, *p2;
10415
10416 p2 = c_getstr (s2);
10417 if (p2 == NULL)
10418 return NULL_TREE;
10419
10420 p1 = c_getstr (s1);
10421 if (p1 != NULL)
10422 {
10423 const char *r = strstr (p1, p2);
10424 tree tem;
10425
10426 if (r == NULL)
10427 return build_int_cst (TREE_TYPE (s1), 0);
10428
10429 /* Return an offset into the constant string argument. */
10430 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10431 return fold_convert_loc (loc, type, tem);
10432 }
10433
10434 /* The argument is const char *, and the result is char *, so we need
10435 a type conversion here to avoid a warning. */
10436 if (p2[0] == '\0')
10437 return fold_convert_loc (loc, type, s1);
10438
10439 if (p2[1] != '\0')
10440 return NULL_TREE;
10441
10442 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10443 if (!fn)
10444 return NULL_TREE;
10445
10446 /* New argument list transforming strstr(s1, s2) to
10447 strchr(s1, s2[0]). */
10448 return build_call_expr_loc (loc, fn, 2, s1,
10449 build_int_cst (integer_type_node, p2[0]));
10450 }
10451 }
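
/* For example, the folding above turns strstr (s, "") into (char *) s,
   strstr (s, "c") into strchr (s, 'c'), and strstr ("hello", "ll")
   into the constant "hello" + 2; a non-constant second argument is
   left for the library.  */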
10452
10453 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10454 the call, and TYPE is its return type.
10455
10456 Return NULL_TREE if no simplification was possible, otherwise return the
10457 simplified form of the call as a tree.
10458
10459 The simplified form may be a constant or other expression which
10460 computes the same value, but in a more efficient manner (including
10461 calls to other builtin functions).
10462
10463 The call may contain arguments which need to be evaluated, but
10464 which are not useful to determine the result of the call. In
10465 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10466 COMPOUND_EXPR will be an argument which must be evaluated.
10467 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10468 COMPOUND_EXPR in the chain will contain the tree for the simplified
10469 form of the builtin function call. */
10470
10471 static tree
10472 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
10473 {
10474 if (!validate_arg (s1, POINTER_TYPE)
10475 || !validate_arg (s2, INTEGER_TYPE))
10476 return NULL_TREE;
10477 else
10478 {
10479 const char *p1;
10480
10481 if (TREE_CODE (s2) != INTEGER_CST)
10482 return NULL_TREE;
10483
10484 p1 = c_getstr (s1);
10485 if (p1 != NULL)
10486 {
10487 char c;
10488 const char *r;
10489 tree tem;
10490
10491 if (target_char_cast (s2, &c))
10492 return NULL_TREE;
10493
10494 r = strchr (p1, c);
10495
10496 if (r == NULL)
10497 return build_int_cst (TREE_TYPE (s1), 0);
10498
10499 /* Return an offset into the constant string argument. */
10500 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10501 return fold_convert_loc (loc, type, tem);
10502 }
10503 return NULL_TREE;
10504 }
10505 }
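
/* For example, strchr ("hello", 'l') folds to "hello" + 2 and
   strchr ("hello", 'z') folds to a null pointer; nothing is done
   unless the string argument is a known constant.  */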
10506
10507 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
10508 the call, and TYPE is its return type.
10509
10510 Return NULL_TREE if no simplification was possible, otherwise return the
10511 simplified form of the call as a tree.
10512
10513 The simplified form may be a constant or other expression which
10514 computes the same value, but in a more efficient manner (including
10515 calls to other builtin functions).
10516
10517 The call may contain arguments which need to be evaluated, but
10518 which are not useful to determine the result of the call. In
10519 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10520 COMPOUND_EXPR will be an argument which must be evaluated.
10521 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10522 COMPOUND_EXPR in the chain will contain the tree for the simplified
10523 form of the builtin function call. */
10524
10525 static tree
10526 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
10527 {
10528 if (!validate_arg (s1, POINTER_TYPE)
10529 || !validate_arg (s2, INTEGER_TYPE))
10530 return NULL_TREE;
10531 else
10532 {
10533 tree fn;
10534 const char *p1;
10535
10536 if (TREE_CODE (s2) != INTEGER_CST)
10537 return NULL_TREE;
10538
10539 p1 = c_getstr (s1);
10540 if (p1 != NULL)
10541 {
10542 char c;
10543 const char *r;
10544 tree tem;
10545
10546 if (target_char_cast (s2, &c))
10547 return NULL_TREE;
10548
10549 r = strrchr (p1, c);
10550
10551 if (r == NULL)
10552 return build_int_cst (TREE_TYPE (s1), 0);
10553
10554 /* Return an offset into the constant string argument. */
10555 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10556 return fold_convert_loc (loc, type, tem);
10557 }
10558
10559 if (! integer_zerop (s2))
10560 return NULL_TREE;
10561
10562 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10563 if (!fn)
10564 return NULL_TREE;
10565
10566 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
10567 return build_call_expr_loc (loc, fn, 2, s1, s2);
10568 }
10569 }
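
/* For example, strrchr ("hello", 'l') folds to "hello" + 3, and for a
   non-constant string strrchr (s, '\0') becomes strchr (s, '\0'),
   since searching for the terminator finds the same byte either
   way.  */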
10570
10571 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
10572 to the call, and TYPE is its return type.
10573
10574 Return NULL_TREE if no simplification was possible, otherwise return the
10575 simplified form of the call as a tree.
10576
10577 The simplified form may be a constant or other expression which
10578 computes the same value, but in a more efficient manner (including
10579 calls to other builtin functions).
10580
10581 The call may contain arguments which need to be evaluated, but
10582 which are not useful to determine the result of the call. In
10583 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10584 COMPOUND_EXPR will be an argument which must be evaluated.
10585 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10586 COMPOUND_EXPR in the chain will contain the tree for the simplified
10587 form of the builtin function call. */
10588
10589 static tree
10590 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
10591 {
10592 if (!validate_arg (s1, POINTER_TYPE)
10593 || !validate_arg (s2, POINTER_TYPE))
10594 return NULL_TREE;
10595 else
10596 {
10597 tree fn;
10598 const char *p1, *p2;
10599
10600 p2 = c_getstr (s2);
10601 if (p2 == NULL)
10602 return NULL_TREE;
10603
10604 p1 = c_getstr (s1);
10605 if (p1 != NULL)
10606 {
10607 const char *r = strpbrk (p1, p2);
10608 tree tem;
10609
10610 if (r == NULL)
10611 return build_int_cst (TREE_TYPE (s1), 0);
10612
10613 /* Return an offset into the constant string argument. */
10614 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10615 return fold_convert_loc (loc, type, tem);
10616 }
10617
10618 if (p2[0] == '\0')
10619 /* strpbrk(x, "") == NULL.
10620 Evaluate and ignore s1 in case it had side-effects. */
10621 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
10622
10623 if (p2[1] != '\0')
10624 return NULL_TREE; /* Really call strpbrk. */
10625
10626 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10627 if (!fn)
10628 return NULL_TREE;
10629
10630 /* New argument list transforming strpbrk(s1, s2) to
10631 strchr(s1, s2[0]). */
10632 return build_call_expr_loc (loc, fn, 2, s1,
10633 build_int_cst (integer_type_node, p2[0]));
10634 }
10635 }
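
/* For example, strpbrk (s, "") folds to a null pointer while still
   evaluating S for side-effects, strpbrk (s, "c") becomes
   strchr (s, 'c'), and strpbrk ("hello", "lo") folds to the constant
   "hello" + 2.  */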
10636
10637 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
10638 to the call.
10639
10640 Return NULL_TREE if no simplification was possible, otherwise return the
10641 simplified form of the call as a tree.
10642
10643 The simplified form may be a constant or other expression which
10644 computes the same value, but in a more efficient manner (including
10645 calls to other builtin functions).
10646
10647 The call may contain arguments which need to be evaluated, but
10648 which are not useful to determine the result of the call. In
10649 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10650 COMPOUND_EXPR will be an argument which must be evaluated.
10651 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10652 COMPOUND_EXPR in the chain will contain the tree for the simplified
10653 form of the builtin function call. */
10654
10655 static tree
10656 fold_builtin_strspn (location_t loc, tree s1, tree s2)
10657 {
10658 if (!validate_arg (s1, POINTER_TYPE)
10659 || !validate_arg (s2, POINTER_TYPE))
10660 return NULL_TREE;
10661 else
10662 {
10663 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
10664
10665 /* If both arguments are constants, evaluate at compile-time. */
10666 if (p1 && p2)
10667 {
10668 const size_t r = strspn (p1, p2);
10669 return build_int_cst (size_type_node, r);
10670 }
10671
10672 /* If either argument is "", return NULL_TREE. */
10673 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
10674 /* Evaluate and ignore both arguments in case either one has
10675 side-effects. */
10676 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
10677 s1, s2);
10678 return NULL_TREE;
10679 }
10680 }
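
/* For example, strspn ("hello", "hel") folds to the constant 4, and
   strspn (s, "") or strspn ("", s) folds to 0 while still evaluating
   both arguments for side-effects.  */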
10681
10682 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
10683 to the call.
10684
10685 Return NULL_TREE if no simplification was possible, otherwise return the
10686 simplified form of the call as a tree.
10687
10688 The simplified form may be a constant or other expression which
10689 computes the same value, but in a more efficient manner (including
10690 calls to other builtin functions).
10691
10692 The call may contain arguments which need to be evaluated, but
10693 which are not useful to determine the result of the call. In
10694 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10695 COMPOUND_EXPR will be an argument which must be evaluated.
10696 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10697 COMPOUND_EXPR in the chain will contain the tree for the simplified
10698 form of the builtin function call. */
10699
10700 static tree
10701 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
10702 {
10703 if (!validate_arg (s1, POINTER_TYPE)
10704 || !validate_arg (s2, POINTER_TYPE))
10705 return NULL_TREE;
10706 else
10707 {
10708 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
10709
10710 /* If both arguments are constants, evaluate at compile-time. */
10711 if (p1 && p2)
10712 {
10713 const size_t r = strcspn (p1, p2);
10714 return build_int_cst (size_type_node, r);
10715 }
10716
10717 /* If the first argument is "", return NULL_TREE. */
10718 if (p1 && *p1 == '\0')
10719 {
10720 /* Evaluate and ignore argument s2 in case it has
10721 side-effects. */
10722 return omit_one_operand_loc (loc, size_type_node,
10723 size_zero_node, s2);
10724 }
10725
10726 /* If the second argument is "", return __builtin_strlen(s1). */
10727 if (p2 && *p2 == '\0')
10728 {
10729 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
10730
10731 /* If the replacement _DECL isn't initialized, don't do the
10732 transformation. */
10733 if (!fn)
10734 return NULL_TREE;
10735
10736 return build_call_expr_loc (loc, fn, 1, s1);
10737 }
10738 return NULL_TREE;
10739 }
10740 }
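
/* For example, strcspn ("hello", "lo") folds to the constant 2,
   strcspn ("", s) folds to 0 while still evaluating S, and
   strcspn (s, "") becomes strlen (s).  */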
10741
10742 /* Fold the next_arg or va_start call EXP. Returns true if an error
10743 was produced, false otherwise. This is done so that we don't output
10744 the same error or warning two or three times. */
10745
10746 bool
10747 fold_builtin_next_arg (tree exp, bool va_start_p)
10748 {
10749 tree fntype = TREE_TYPE (current_function_decl);
10750 int nargs = call_expr_nargs (exp);
10751 tree arg;
10752 /* There is a good chance the current input_location points inside the
10753 definition of the va_start macro (perhaps on the token for
10754 builtin) in a system header, so warnings will not be emitted.
10755 Use the location in real source code. */
10756 source_location current_location =
10757 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
10758 NULL);
10759
10760 if (!stdarg_p (fntype))
10761 {
10762 error ("%<va_start%> used in function with fixed args");
10763 return true;
10764 }
10765
10766 if (va_start_p)
10767 {
10768 if (nargs != 2)
10769 {
10770 error ("wrong number of arguments to function %<va_start%>");
10771 return true;
10772 }
10773 arg = CALL_EXPR_ARG (exp, 1);
10774 }
10775 /* We use __builtin_va_start (ap, 0) or __builtin_next_arg (0)
10776 once we have checked the arguments and, if needed, issued a warning. */
10777 else
10778 {
10779 if (nargs == 0)
10780 {
10781 /* Evidently an out of date version of <stdarg.h>; can't validate
10782 va_start's second argument, but can still work as intended. */
10783 warning_at (current_location,
10784 OPT_Wvarargs,
10785 "%<__builtin_next_arg%> called without an argument");
10786 return true;
10787 }
10788 else if (nargs > 1)
10789 {
10790 error ("wrong number of arguments to function %<__builtin_next_arg%>");
10791 return true;
10792 }
10793 arg = CALL_EXPR_ARG (exp, 0);
10794 }
10795
10796 if (TREE_CODE (arg) == SSA_NAME)
10797 arg = SSA_NAME_VAR (arg);
10798
10799 /* We destructively modify the call to be __builtin_va_start (ap, 0)
10800 or __builtin_next_arg (0) the first time we see it, after checking
10801 the arguments and if needed issuing a warning. */
10802 if (!integer_zerop (arg))
10803 {
10804 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
10805
10806 /* Strip off all nops for the sake of the comparison. This
10807 is not quite the same as STRIP_NOPS. It does more.
10808 We must also strip off INDIRECT_EXPR for C++ reference
10809 parameters. */
10810 while (CONVERT_EXPR_P (arg)
10811 || TREE_CODE (arg) == INDIRECT_REF)
10812 arg = TREE_OPERAND (arg, 0);
10813 if (arg != last_parm)
10814 {
10815 /* FIXME: Sometimes the tree optimizers hand us something other
10816 than the last argument even though the user wrote the last
10817 argument. We just warn here and carry on, which means we may
10818 still generate wrong code in such
10819 cases. */
10820 warning_at (current_location,
10821 OPT_Wvarargs,
10822 "second parameter of %<va_start%> not last named argument");
10823 }
10824
10825 /* Undefined by C99 7.15.1.4p4 (va_start):
10826 "If the parameter parmN is declared with the register storage
10827 class, with a function or array type, or with a type that is
10828 not compatible with the type that results after application of
10829 the default argument promotions, the behavior is undefined."
10830 */
10831 else if (DECL_REGISTER (arg))
10832 {
10833 warning_at (current_location,
10834 OPT_Wvarargs,
10835 "undefined behaviour when second parameter of "
10836 "%<va_start%> is declared with %<register%> storage");
10837 }
10838
10839 /* We want to verify the second parameter just once before the tree
10840 optimizers are run and then avoid keeping it in the tree,
10841 as otherwise we could warn even for correct code like:
10842 void foo (int i, ...)
10843 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
10844 if (va_start_p)
10845 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
10846 else
10847 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
10848 }
10849 return false;
10850 }
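
/* For illustration, given

     void foo (int a, int b, ...)
     { va_list ap; va_start (ap, a); va_end (ap); }

   the function above warns that the second parameter of va_start is
   not the last named argument; after the check it rewrites the call to
   __builtin_va_start (ap, 0) so the parameter reference need not be
   kept alive for later inspections.  */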
10851
10852
10853 /* Expand a call EXP to __builtin_object_size. */
10854
10855 static rtx
10856 expand_builtin_object_size (tree exp)
10857 {
10858 tree ost;
10859 int object_size_type;
10860 tree fndecl = get_callee_fndecl (exp);
10861
10862 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
10863 {
10864 error ("%Kfirst argument of %D must be a pointer, second integer constant",
10865 exp, fndecl);
10866 expand_builtin_trap ();
10867 return const0_rtx;
10868 }
10869
10870 ost = CALL_EXPR_ARG (exp, 1);
10871 STRIP_NOPS (ost);
10872
10873 if (TREE_CODE (ost) != INTEGER_CST
10874 || tree_int_cst_sgn (ost) < 0
10875 || compare_tree_int (ost, 3) > 0)
10876 {
10877 error ("%Klast argument of %D is not integer constant between 0 and 3",
10878 exp, fndecl);
10879 expand_builtin_trap ();
10880 return const0_rtx;
10881 }
10882
10883 object_size_type = tree_to_shwi (ost);
10884
10885 return object_size_type < 2 ? constm1_rtx : const0_rtx;
10886 }
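
/* For illustration: by the time a call reaches this expander the
   object-size machinery has folded everything it could, so what is
   left expands to the "unknown" value, e.g. __builtin_object_size (p, 0)
   becomes (size_t) -1 and __builtin_object_size (p, 2) becomes
   (size_t) 0.  */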
10887
10888 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
10889 FCODE is the BUILT_IN_* to use.
10890 Return NULL_RTX if we failed; the caller should emit a normal call,
10891 otherwise try to get the result in TARGET, if convenient (and in
10892 mode MODE if that's convenient). */
10893
10894 static rtx
10895 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
10896 enum built_in_function fcode)
10897 {
10898 tree dest, src, len, size;
10899
10900 if (!validate_arglist (exp,
10901 POINTER_TYPE,
10902 fcode == BUILT_IN_MEMSET_CHK
10903 ? INTEGER_TYPE : POINTER_TYPE,
10904 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
10905 return NULL_RTX;
10906
10907 dest = CALL_EXPR_ARG (exp, 0);
10908 src = CALL_EXPR_ARG (exp, 1);
10909 len = CALL_EXPR_ARG (exp, 2);
10910 size = CALL_EXPR_ARG (exp, 3);
10911
10912 if (! tree_fits_uhwi_p (size))
10913 return NULL_RTX;
10914
10915 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
10916 {
10917 tree fn;
10918
10919 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
10920 {
10921 warning_at (tree_nonartificial_location (exp),
10922 0, "%Kcall to %D will always overflow destination buffer",
10923 exp, get_callee_fndecl (exp));
10924 return NULL_RTX;
10925 }
10926
10927 fn = NULL_TREE;
10928 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
10929 mem{cpy,pcpy,move,set} is available. */
10930 switch (fcode)
10931 {
10932 case BUILT_IN_MEMCPY_CHK:
10933 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
10934 break;
10935 case BUILT_IN_MEMPCPY_CHK:
10936 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
10937 break;
10938 case BUILT_IN_MEMMOVE_CHK:
10939 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
10940 break;
10941 case BUILT_IN_MEMSET_CHK:
10942 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
10943 break;
10944 default:
10945 break;
10946 }
10947
10948 if (! fn)
10949 return NULL_RTX;
10950
10951 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
10952 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10953 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10954 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10955 }
10956 else if (fcode == BUILT_IN_MEMSET_CHK)
10957 return NULL_RTX;
10958 else
10959 {
10960 unsigned int dest_align = get_pointer_alignment (dest);
10961
10962 /* If DEST is not a pointer type, call the normal function. */
10963 if (dest_align == 0)
10964 return NULL_RTX;
10965
10966 /* If SRC and DEST are the same (and not volatile), do nothing. */
10967 if (operand_equal_p (src, dest, 0))
10968 {
10969 tree expr;
10970
10971 if (fcode != BUILT_IN_MEMPCPY_CHK)
10972 {
10973 /* Evaluate and ignore LEN in case it has side-effects. */
10974 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
10975 return expand_expr (dest, target, mode, EXPAND_NORMAL);
10976 }
10977
10978 expr = fold_build_pointer_plus (dest, len);
10979 return expand_expr (expr, target, mode, EXPAND_NORMAL);
10980 }
10981
10982 /* __memmove_chk special case. */
10983 if (fcode == BUILT_IN_MEMMOVE_CHK)
10984 {
10985 unsigned int src_align = get_pointer_alignment (src);
10986
10987 if (src_align == 0)
10988 return NULL_RTX;
10989
10990 /* If src is categorized for a readonly section we can use
10991 normal __memcpy_chk. */
10992 if (readonly_data_expr (src))
10993 {
10994 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
10995 if (!fn)
10996 return NULL_RTX;
10997 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
10998 dest, src, len, size);
10999 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11000 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11001 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11002 }
11003 }
11004 return NULL_RTX;
11005 }
11006 }
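
/* For example, __memcpy_chk (dst, src, n, os) is lowered to plain
   memcpy (dst, src, n) when OS is (size_t) -1 (no object-size
   information) or when a constant N is known not to exceed OS; when a
   constant N does exceed OS, the expander warns that the call will
   always overflow and leaves the checked call in place.  */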
11007
11008 /* Emit warning if a buffer overflow is detected at compile time. */
11009
11010 static void
11011 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11012 {
11013 int is_strlen = 0;
11014 tree len, size;
11015 location_t loc = tree_nonartificial_location (exp);
11016
11017 switch (fcode)
11018 {
11019 case BUILT_IN_STRCPY_CHK:
11020 case BUILT_IN_STPCPY_CHK:
11021 /* For __strcat_chk the warning will be emitted only if overflowing
11022 by at least strlen (dest) + 1 bytes. */
11023 case BUILT_IN_STRCAT_CHK:
11024 len = CALL_EXPR_ARG (exp, 1);
11025 size = CALL_EXPR_ARG (exp, 2);
11026 is_strlen = 1;
11027 break;
11028 case BUILT_IN_STRNCAT_CHK:
11029 case BUILT_IN_STRNCPY_CHK:
11030 case BUILT_IN_STPNCPY_CHK:
11031 len = CALL_EXPR_ARG (exp, 2);
11032 size = CALL_EXPR_ARG (exp, 3);
11033 break;
11034 case BUILT_IN_SNPRINTF_CHK:
11035 case BUILT_IN_VSNPRINTF_CHK:
11036 len = CALL_EXPR_ARG (exp, 1);
11037 size = CALL_EXPR_ARG (exp, 3);
11038 break;
11039 default:
11040 gcc_unreachable ();
11041 }
11042
11043 if (!len || !size)
11044 return;
11045
11046 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
11047 return;
11048
11049 if (is_strlen)
11050 {
11051 len = c_strlen (len, 1);
11052 if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
11053 return;
11054 }
11055 else if (fcode == BUILT_IN_STRNCAT_CHK)
11056 {
11057 tree src = CALL_EXPR_ARG (exp, 1);
11058 if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
11059 return;
11060 src = c_strlen (src, 1);
11061 if (! src || ! tree_fits_uhwi_p (src))
11062 {
11063 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
11064 exp, get_callee_fndecl (exp));
11065 return;
11066 }
11067 else if (tree_int_cst_lt (src, size))
11068 return;
11069 }
11070 else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
11071 return;
11072
11073 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
11074 exp, get_callee_fndecl (exp));
11075 }
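
/* For example, with char buf[4] (object size 4),
   __builtin___strcpy_chk (buf, "hello", 4) is diagnosed above: the
   source needs strlen ("hello") + 1 == 6 bytes, and the warning fires
   whenever the known string length is not smaller than the destination
   size.  */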
11076
11077 /* Emit warning if a buffer overflow is detected at compile time
11078 in __sprintf_chk/__vsprintf_chk calls. */
11079
11080 static void
11081 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11082 {
11083 tree size, len, fmt;
11084 const char *fmt_str;
11085 int nargs = call_expr_nargs (exp);
11086
11087 /* Verify the required arguments in the original call. */
11088
11089 if (nargs < 4)
11090 return;
11091 size = CALL_EXPR_ARG (exp, 2);
11092 fmt = CALL_EXPR_ARG (exp, 3);
11093
11094 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
11095 return;
11096
11097 /* Check whether the format is a literal string constant. */
11098 fmt_str = c_getstr (fmt);
11099 if (fmt_str == NULL)
11100 return;
11101
11102 if (!init_target_chars ())
11103 return;
11104
11105 /* If the format doesn't contain % args or %%, we know its size. */
11106 if (strchr (fmt_str, target_percent) == 0)
11107 len = build_int_cstu (size_type_node, strlen (fmt_str));
11108 /* If the format is "%s" and the first variadic argument is a string
11109 literal, we know its size too. */
11110 else if (fcode == BUILT_IN_SPRINTF_CHK
11111 && strcmp (fmt_str, target_percent_s) == 0)
11112 {
11113 tree arg;
11114
11115 if (nargs < 5)
11116 return;
11117 arg = CALL_EXPR_ARG (exp, 4);
11118 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11119 return;
11120
11121 len = c_strlen (arg, 1);
11122 if (!len || ! tree_fits_uhwi_p (len))
11123 return;
11124 }
11125 else
11126 return;
11127
11128 if (! tree_int_cst_lt (len, size))
11129 warning_at (tree_nonartificial_location (exp),
11130 0, "%Kcall to %D will always overflow destination buffer",
11131 exp, get_callee_fndecl (exp));
11132 }
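
/* For example, with char buf[4] (object size 4),
   __builtin___sprintf_chk (buf, 0, 4, "hello") is diagnosed above: the
   format contains no '%', so the output is known to be 6 bytes
   including the terminating NUL, which cannot fit.  */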
11133
11134 /* Emit warning if a free is called with address of a variable. */
11135
11136 static void
11137 maybe_emit_free_warning (tree exp)
11138 {
11139 tree arg = CALL_EXPR_ARG (exp, 0);
11140
11141 STRIP_NOPS (arg);
11142 if (TREE_CODE (arg) != ADDR_EXPR)
11143 return;
11144
11145 arg = get_base_address (TREE_OPERAND (arg, 0));
11146 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
11147 return;
11148
11149 if (SSA_VAR_P (arg))
11150 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11151 "%Kattempt to free a non-heap object %qD", exp, arg);
11152 else
11153 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11154 "%Kattempt to free a non-heap object", exp);
11155 }
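
/* For example, { int x; free (&x); } is diagnosed above under
   -Wfree-nonheap-object, since the address of an automatic or static
   variable cannot have come from the heap allocator.  */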
11156
11157 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11158 if possible. */
11159
11160 static tree
11161 fold_builtin_object_size (tree ptr, tree ost)
11162 {
11163 unsigned HOST_WIDE_INT bytes;
11164 int object_size_type;
11165
11166 if (!validate_arg (ptr, POINTER_TYPE)
11167 || !validate_arg (ost, INTEGER_TYPE))
11168 return NULL_TREE;
11169
11170 STRIP_NOPS (ost);
11171
11172 if (TREE_CODE (ost) != INTEGER_CST
11173 || tree_int_cst_sgn (ost) < 0
11174 || compare_tree_int (ost, 3) > 0)
11175 return NULL_TREE;
11176
11177 object_size_type = tree_to_shwi (ost);
11178
11179 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11180 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11181 and (size_t) 0 for types 2 and 3. */
11182 if (TREE_SIDE_EFFECTS (ptr))
11183 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11184
11185 if (TREE_CODE (ptr) == ADDR_EXPR)
11186 {
11187 bytes = compute_builtin_object_size (ptr, object_size_type);
11188 if (wi::fits_to_tree_p (bytes, size_type_node))
11189 return build_int_cstu (size_type_node, bytes);
11190 }
11191 else if (TREE_CODE (ptr) == SSA_NAME)
11192 {
11193 /* If object size is not known yet, delay folding until
11194 later. Maybe subsequent passes will help determining
11195 it. */
11196 bytes = compute_builtin_object_size (ptr, object_size_type);
11197 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
11198 && wi::fits_to_tree_p (bytes, size_type_node))
11199 return build_int_cstu (size_type_node, bytes);
11200 }
11201
11202 return NULL_TREE;
11203 }
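
/* For example, given char buf[64], __builtin_object_size (&buf[8], 0)
   folds to 56 here, while a pointer whose target is still unknown is
   deliberately left unfolded so that later passes may determine it.  */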
11204
11205 /* Builtins with folding operations that operate on "..." arguments
11206 need special handling; we need to store the arguments in a convenient
11207 data structure before attempting any folding. Fortunately there are
11208 only a few builtins that fall into this category. FNDECL is the
11209 function, ARGS is its argument array and NARGS the argument count. */
11210
11211 static tree
11212 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
11213 {
11214 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11215 tree ret = NULL_TREE;
11216
11217 switch (fcode)
11218 {
11219 case BUILT_IN_FPCLASSIFY:
11220 ret = fold_builtin_fpclassify (loc, args, nargs);
11221 break;
11222
11223 default:
11224 break;
11225 }
11226 if (ret)
11227 {
11228 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11229 SET_EXPR_LOCATION (ret, loc);
11230 TREE_NO_WARNING (ret) = 1;
11231 return ret;
11232 }
11233 return NULL_TREE;
11234 }
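
/* For example, __builtin_fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL,
   FP_SUBNORMAL, FP_ZERO, x) carries its five classification values as
   leading arguments, which is why the whole argument vector must be
   collected before folding can select the value matching a constant
   X.  */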
11235
11236 /* Initialize format string characters in the target charset. */
11237
11238 bool
11239 init_target_chars (void)
11240 {
11241 static bool init;
11242 if (!init)
11243 {
11244 target_newline = lang_hooks.to_target_charset ('\n');
11245 target_percent = lang_hooks.to_target_charset ('%');
11246 target_c = lang_hooks.to_target_charset ('c');
11247 target_s = lang_hooks.to_target_charset ('s');
11248 if (target_newline == 0 || target_percent == 0 || target_c == 0
11249 || target_s == 0)
11250 return false;
11251
11252 target_percent_c[0] = target_percent;
11253 target_percent_c[1] = target_c;
11254 target_percent_c[2] = '\0';
11255
11256 target_percent_s[0] = target_percent;
11257 target_percent_s[1] = target_s;
11258 target_percent_s[2] = '\0';
11259
11260 target_percent_s_newline[0] = target_percent;
11261 target_percent_s_newline[1] = target_s;
11262 target_percent_s_newline[2] = target_newline;
11263 target_percent_s_newline[3] = '\0';
11264
11265 init = true;
11266 }
11267 return true;
11268 }
11269
11270 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
11271 and no overflow/underflow occurred. INEXACT is true if M was not
11272 exactly calculated. TYPE is the tree type for the result. This
11273 function assumes that you cleared the MPFR flags and then
11274 calculated M to see if anything subsequently set a flag prior to
11275 entering this function. Return NULL_TREE if any checks fail. */
11276
11277 static tree
11278 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
11279 {
11280 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11281 overflow/underflow occurred. If -frounding-math, proceed iff the
11282 result of calling FUNC was exact. */
11283 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
11284 && (!flag_rounding_math || !inexact))
11285 {
11286 REAL_VALUE_TYPE rr;
11287
11288 real_from_mpfr (&rr, m, type, GMP_RNDN);
11289 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
11290 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11291 but the mpfr_t is not, then we underflowed in the
11292 conversion. */
11293 if (real_isfinite (&rr)
11294 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
11295 {
11296 REAL_VALUE_TYPE rmode;
11297
11298 real_convert (&rmode, TYPE_MODE (type), &rr);
11299 /* Proceed iff the specified mode can hold the value. */
11300 if (real_identical (&rmode, &rr))
11301 return build_real (type, rmode);
11302 }
11303 }
11304 return NULL_TREE;
11305 }
11306
11307 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
11308 number and no overflow/underflow occurred. INEXACT is true if M
11309 was not exactly calculated. TYPE is the tree type for the result.
11310 This function assumes that you cleared the MPFR flags and then
11311 calculated M to see if anything subsequently set a flag prior to
11312 entering this function. Return NULL_TREE if any checks fail, if
11313 FORCE_CONVERT is true, then bypass the checks. */
11314
11315 static tree
11316 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
11317 {
11318 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11319 overflow/underflow occurred. If -frounding-math, proceed iff the
11320 result of calling FUNC was exact. */
11321 if (force_convert
11322 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
11323 && !mpfr_overflow_p () && !mpfr_underflow_p ()
11324 && (!flag_rounding_math || !inexact)))
11325 {
11326 REAL_VALUE_TYPE re, im;
11327
11328 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
11329 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
11330 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
11331 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11332 but the mpfr_t is not, then we underflowed in the
11333 conversion. */
11334 if (force_convert
11335 || (real_isfinite (&re) && real_isfinite (&im)
11336 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
11337 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
11338 {
11339 REAL_VALUE_TYPE re_mode, im_mode;
11340
11341 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
11342 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
11343 /* Proceed iff the specified mode can hold the value. */
11344 if (force_convert
11345 || (real_identical (&re_mode, &re)
11346 && real_identical (&im_mode, &im)))
11347 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
11348 build_real (TREE_TYPE (type), im_mode));
11349 }
11350 }
11351 return NULL_TREE;
11352 }
11353
11354 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
11355 FUNC on it and return the resulting value as a tree with type TYPE.
11356 If MIN and/or MAX are not NULL, then the supplied ARG must be
11357 within those bounds. If INCLUSIVE is true, then MIN/MAX are
11358 acceptable values, otherwise they are not. The mpfr precision is
11359 set to the precision of TYPE. We assume that function FUNC returns
11360 zero if the result could be calculated exactly within the requested
11361 precision. */
11362
11363 static tree
11364 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
11365 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
11366 bool inclusive)
11367 {
11368 tree result = NULL_TREE;
11369
11370 STRIP_NOPS (arg);
11371
11372 /* To proceed, MPFR must exactly represent the target floating point
11373 format, which only happens when the target base equals two. */
11374 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11375 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
11376 {
11377 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
11378
11379 if (real_isfinite (ra)
11380 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
11381 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
11382 {
11383 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11384 const int prec = fmt->p;
11385 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11386 int inexact;
11387 mpfr_t m;
11388
11389 mpfr_init2 (m, prec);
11390 mpfr_from_real (m, ra, GMP_RNDN);
11391 mpfr_clear_flags ();
11392 inexact = func (m, m, rnd);
11393 result = do_mpfr_ckconv (m, type, inexact);
11394 mpfr_clear (m);
11395 }
11396 }
11397
11398 return result;
11399 }
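
/* Illustrative caller (a sketch, not from this file): folding a
   constant square root might use

     result = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true);

   where the dconst0/INCLUSIVE bounds reject negative arguments, and
   the value comes back as a REAL_CST computed at TYPE's precision, or
   NULL_TREE if it cannot be converted safely.  */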
11400
11401 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
11402 FUNC on it and return the resulting value as a tree with type TYPE.
11403 The mpfr precision is set to the precision of TYPE. We assume that
11404 function FUNC returns zero if the result could be calculated
11405 exactly within the requested precision. */
11406
11407 static tree
11408 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
11409 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
11410 {
11411 tree result = NULL_TREE;
11412
11413 STRIP_NOPS (arg1);
11414 STRIP_NOPS (arg2);
11415
11416 /* To proceed, MPFR must exactly represent the target floating point
11417 format, which only happens when the target base equals two. */
11418 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11419 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
11420 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
11421 {
11422 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
11423 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
11424
11425 if (real_isfinite (ra1) && real_isfinite (ra2))
11426 {
11427 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11428 const int prec = fmt->p;
11429 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11430 int inexact;
11431 mpfr_t m1, m2;
11432
11433 mpfr_inits2 (prec, m1, m2, NULL);
11434 mpfr_from_real (m1, ra1, GMP_RNDN);
11435 mpfr_from_real (m2, ra2, GMP_RNDN);
11436 mpfr_clear_flags ();
11437 inexact = func (m1, m1, m2, rnd);
11438 result = do_mpfr_ckconv (m1, type, inexact);
11439 mpfr_clears (m1, m2, NULL);
11440 }
11441 }
11442
11443 return result;
11444 }
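
/* For example, the atan2 folder uses this with FUNC == mpfr_atan2, so
   atan2 (1.0, 1.0) becomes the REAL_CST for pi/4 rounded to TYPE's
   format, provided both operands are finite constants.  */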
11445
11446 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
11447 FUNC on it and return the resulting value as a tree with type TYPE.
11448 The mpfr precision is set to the precision of TYPE. We assume that
11449 function FUNC returns zero if the result could be calculated
11450 exactly within the requested precision. */
11451
11452 static tree
11453 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
11454 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
11455 {
11456 tree result = NULL_TREE;
11457
11458 STRIP_NOPS (arg1);
11459 STRIP_NOPS (arg2);
11460 STRIP_NOPS (arg3);
11461
11462 /* To proceed, MPFR must exactly represent the target floating point
11463 format, which only happens when the target base equals two. */
11464 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11465 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
11466 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
11467 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
11468 {
11469 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
11470 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
11471 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
11472
11473 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
11474 {
11475 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11476 const int prec = fmt->p;
11477 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11478 int inexact;
11479 mpfr_t m1, m2, m3;
11480
11481 mpfr_inits2 (prec, m1, m2, m3, NULL);
11482 mpfr_from_real (m1, ra1, GMP_RNDN);
11483 mpfr_from_real (m2, ra2, GMP_RNDN);
11484 mpfr_from_real (m3, ra3, GMP_RNDN);
11485 mpfr_clear_flags ();
11486 inexact = func (m1, m1, m2, m3, rnd);
11487 result = do_mpfr_ckconv (m1, type, inexact);
11488 mpfr_clears (m1, m2, m3, NULL);
11489 }
11490 }
11491
11492 return result;
11493 }
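
/* For example, fma folding uses this with FUNC == mpfr_fma, so
   fma (2.0, 3.0, 1.0) becomes the constant 7.0, computed with the
   single rounding that distinguishes fma from a separate multiply and
   add.  */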
11494
11495 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
11496 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
11497 If ARG_SINP and ARG_COSP are NULL then the result is returned
11498 as a complex value.
11499 The type is taken from the type of ARG and is used for setting the
11500 precision of the calculation and results. */
11501
11502 static tree
11503 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
11504 {
11505 tree const type = TREE_TYPE (arg);
11506 tree result = NULL_TREE;
11507
11508 STRIP_NOPS (arg);
11509
11510 /* To proceed, MPFR must exactly represent the target floating point
11511 format, which only happens when the target base equals two. */
11512 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11513 && TREE_CODE (arg) == REAL_CST
11514 && !TREE_OVERFLOW (arg))
11515 {
11516 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
11517
11518 if (real_isfinite (ra))
11519 {
11520 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11521 const int prec = fmt->p;
11522 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11523 tree result_s, result_c;
11524 int inexact;
11525 mpfr_t m, ms, mc;
11526
11527 mpfr_inits2 (prec, m, ms, mc, NULL);
11528 mpfr_from_real (m, ra, GMP_RNDN);
11529 mpfr_clear_flags ();
11530 inexact = mpfr_sin_cos (ms, mc, m, rnd);
11531 result_s = do_mpfr_ckconv (ms, type, inexact);
11532 result_c = do_mpfr_ckconv (mc, type, inexact);
11533 mpfr_clears (m, ms, mc, NULL);
11534 if (result_s && result_c)
11535 {
11536 /* If we are to return the result in a complex value, do so. */
11537 if (!arg_sinp && !arg_cosp)
11538 return build_complex (build_complex_type (type),
11539 result_c, result_s);
11540
11541 /* Dereference the sin/cos pointer arguments. */
11542 arg_sinp = build_fold_indirect_ref (arg_sinp);
11543 arg_cosp = build_fold_indirect_ref (arg_cosp);
11544 /* Proceed iff valid pointer types were passed in. */
11545 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
11546 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
11547 {
11548 /* Set the values. */
11549 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
11550 result_s);
11551 TREE_SIDE_EFFECTS (result_s) = 1;
11552 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
11553 result_c);
11554 TREE_SIDE_EFFECTS (result_c) = 1;
11555 /* Combine the assignments into a compound expr. */
11556 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
11557 result_s, result_c));
11558 }
11559 }
11560 }
11561 }
11562 return result;
11563 }
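
/* For example, sincos (0.0, &s, &c) folds to a compound expression
   storing 0.0 in *s and 1.0 in *c, while a cexpi-style caller passing
   NULL pointers gets the pair back as the complex constant
   1.0 + 0.0i.  */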
11564
11565 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
11566 two-argument mpfr order N Bessel function FUNC on them and return
11567 the resulting value as a tree with type TYPE. The mpfr precision
11568 is set to the precision of TYPE. We assume that function FUNC
11569 returns zero if the result could be calculated exactly within the
11570 requested precision. */
11571 static tree
11572 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
11573 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
11574 const REAL_VALUE_TYPE *min, bool inclusive)
11575 {
11576 tree result = NULL_TREE;
11577
11578 STRIP_NOPS (arg1);
11579 STRIP_NOPS (arg2);
11580
11581 /* To proceed, MPFR must exactly represent the target floating point
11582 format, which only happens when the target base equals two. */
11583 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11584 && tree_fits_shwi_p (arg1)
11585 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
11586 {
11587 const HOST_WIDE_INT n = tree_to_shwi (arg1);
11588 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
11589
11590 if (n == (long)n
11591 && real_isfinite (ra)
11592 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
11593 {
11594 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11595 const int prec = fmt->p;
11596 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11597 int inexact;
11598 mpfr_t m;
11599
11600 mpfr_init2 (m, prec);
11601 mpfr_from_real (m, ra, GMP_RNDN);
11602 mpfr_clear_flags ();
11603 inexact = func (m, n, m, rnd);
11604 result = do_mpfr_ckconv (m, type, inexact);
11605 mpfr_clear (m);
11606 }
11607 }
11608
11609 return result;
11610 }
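
/* For example, jn (0, 0.0) folds to 1.0 through the code above, MPFR
   computing the order-N Bessel value at TYPE's precision and
   do_mpfr_ckconv verifying that the result fits the target format.  */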
11611
11612 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
11613 the pointer *(ARG_QUO) and return the result. The type is taken
11614 from the type of ARG0 and is used for setting the precision of the
11615 calculation and results. */
11616
11617 static tree
11618 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
11619 {
11620 tree const type = TREE_TYPE (arg0);
11621 tree result = NULL_TREE;
11622
11623 STRIP_NOPS (arg0);
11624 STRIP_NOPS (arg1);
11625
11626 /* To proceed, MPFR must exactly represent the target floating point
11627 format, which only happens when the target base equals two. */
11628 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11629 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
11630 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
11631 {
11632 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
11633 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
11634
11635 if (real_isfinite (ra0) && real_isfinite (ra1))
11636 {
11637 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11638 const int prec = fmt->p;
11639 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11640 tree result_rem;
11641 long integer_quo;
11642 mpfr_t m0, m1;
11643
11644 mpfr_inits2 (prec, m0, m1, NULL);
11645 mpfr_from_real (m0, ra0, GMP_RNDN);
11646 mpfr_from_real (m1, ra1, GMP_RNDN);
11647 mpfr_clear_flags ();
11648 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
11649 /* Remquo is independent of the rounding mode, so pass
11650 inexact=0 to do_mpfr_ckconv(). */
11651 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
11652 mpfr_clears (m0, m1, NULL);
11653 if (result_rem)
11654 {
11655 /* MPFR calculates quo in the host's long so it may
11656 return more bits in quo than the target int can hold
11657 if sizeof(host long) > sizeof(target int). This can
11658 happen even for native compilers in LP64 mode. In
11659 these cases, modulo the quo value with the largest
11660 number that the target int can hold while leaving one
11661 bit for the sign. */
11662 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
11663 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
11664
11665 /* Dereference the quo pointer argument. */
11666 arg_quo = build_fold_indirect_ref (arg_quo);
11667 /* Proceed iff a valid pointer type was passed in. */
11668 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
11669 {
11670 /* Set the value. */
11671 tree result_quo
11672 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
11673 build_int_cst (TREE_TYPE (arg_quo),
11674 integer_quo));
11675 TREE_SIDE_EFFECTS (result_quo) = 1;
11676 /* Combine the quo assignment with the rem. */
11677 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
11678 result_quo, result_rem));
11679 }
11680 }
11681 }
11682 }
11683 return result;
11684 }
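
/* For example, folding remquo (10.0, 3.0, &q) yields a compound
   expression that stores 3 in *q and evaluates to the remainder 1.0,
   matching remquo's round-to-nearest quotient semantics.  */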
11685
11686 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
11687 resulting value as a tree with type TYPE. The mpfr precision is
11688 set to the precision of TYPE. We assume that this mpfr function
11689 returns zero if the result could be calculated exactly within the
11690 requested precision. In addition, the integer pointer represented
11691 by ARG_SG will be dereferenced and set to the appropriate signgam
11692 (-1,1) value. */
11693
11694 static tree
11695 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
11696 {
11697 tree result = NULL_TREE;
11698
11699 STRIP_NOPS (arg);
11700
11701 /* To proceed, MPFR must exactly represent the target floating point
11702 format, which only happens when the target base equals two. Also
11703 verify ARG is a constant and that ARG_SG is an int pointer. */
11704 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11705 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
11706 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
11707 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
11708 {
11709 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
11710
11711 /* In addition to NaN and Inf, the argument cannot be zero or a
11712 negative integer. */
11713 if (real_isfinite (ra)
11714 && ra->cl != rvc_zero
11715 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
11716 {
11717 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11718 const int prec = fmt->p;
11719 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
11720 int inexact, sg;
11721 mpfr_t m;
11722 tree result_lg;
11723
11724 mpfr_init2 (m, prec);
11725 mpfr_from_real (m, ra, GMP_RNDN);
11726 mpfr_clear_flags ();
11727 inexact = mpfr_lgamma (m, &sg, m, rnd);
11728 result_lg = do_mpfr_ckconv (m, type, inexact);
11729 mpfr_clear (m);
11730 if (result_lg)
11731 {
11732 tree result_sg;
11733
11734 /* Dereference the arg_sg pointer argument. */
11735 arg_sg = build_fold_indirect_ref (arg_sg);
11736 /* Assign the signgam value into *arg_sg. */
11737 result_sg = fold_build2 (MODIFY_EXPR,
11738 TREE_TYPE (arg_sg), arg_sg,
11739 build_int_cst (TREE_TYPE (arg_sg), sg));
11740 TREE_SIDE_EFFECTS (result_sg) = 1;
11741 /* Combine the signgam assignment with the lgamma result. */
11742 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
11743 result_sg, result_lg));
11744 }
11745 }
11746 }
11747
11748 return result;
11749 }
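
/* For example, lgamma_r (0.5, &sg) folds to a compound expression
   storing 1 in *sg (gamma (0.5) == sqrt (pi) is positive) and
   evaluating to log (sqrt (pi)).  */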
11750
11751 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
11752 function FUNC on it and return the resulting value as a tree with
11753 type TYPE. The mpfr precision is set to the precision of TYPE. We
11754 assume that function FUNC returns zero if the result could be
11755 calculated exactly within the requested precision. */
11756
11757 static tree
11758 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
11759 {
11760 tree result = NULL_TREE;
11761
11762 STRIP_NOPS (arg);
11763
11764 /* To proceed, MPFR must exactly represent the target floating point
11765 format, which only happens when the target base equals two. */
11766 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
11767 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
11768 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
11769 {
11770 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
11771 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
11772
11773 if (real_isfinite (re) && real_isfinite (im))
11774 {
11775 const struct real_format *const fmt =
11776 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
11777 const int prec = fmt->p;
11778 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
11779 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
11780 int inexact;
11781 mpc_t m;
11782
11783 mpc_init2 (m, prec);
11784 mpfr_from_real (mpc_realref (m), re, rnd);
11785 mpfr_from_real (mpc_imagref (m), im, rnd);
11786 mpfr_clear_flags ();
11787 inexact = func (m, m, crnd);
11788 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
11789 mpc_clear (m);
11790 }
11791 }
11792
11793 return result;
11794 }
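
/* For example, csin folding uses this with FUNC == mpc_sin, turning
   csin of a COMPLEX_CST into a COMPLEX_CST computed at the precision
   of TYPE's element type.  */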
11795
11796 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
11797 mpc function FUNC on it and return the resulting value as a tree
11798 with type TYPE. The mpfr precision is set to the precision of
11799 TYPE. We assume that function FUNC returns zero if the result
11800 could be calculated exactly within the requested precision. If
11801 DO_NONFINITE is true, then fold expressions containing Inf or NaN
11802 in the arguments and/or results. */
11803
11804 tree
11805 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
11806 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
11807 {
11808 tree result = NULL_TREE;
11809
11810 STRIP_NOPS (arg0);
11811 STRIP_NOPS (arg1);
11812
11813 /* To proceed, MPFR must exactly represent the target floating point
11814 format, which only happens when the target base equals two. */
11815 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
11816 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
11817 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
11818 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
11819 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
11820 {
11821 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
11822 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
11823 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
11824 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
11825
11826 if (do_nonfinite
11827 || (real_isfinite (re0) && real_isfinite (im0)
11828 && real_isfinite (re1) && real_isfinite (im1)))
11829 {
11830 const struct real_format *const fmt =
11831 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
11832 const int prec = fmt->p;
11833 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
11834 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
11835 int inexact;
11836 mpc_t m0, m1;
11837
11838 mpc_init2 (m0, prec);
11839 mpc_init2 (m1, prec);
11840 mpfr_from_real (mpc_realref (m0), re0, rnd);
11841 mpfr_from_real (mpc_imagref (m0), im0, rnd);
11842 mpfr_from_real (mpc_realref (m1), re1, rnd);
11843 mpfr_from_real (mpc_imagref (m1), im1, rnd);
11844 mpfr_clear_flags ();
11845 inexact = func (m0, m0, m1, crnd);
11846 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
11847 mpc_clear (m0);
11848 mpc_clear (m1);
11849 }
11850 }
11851
11852 return result;
11853 }
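
/* Illustrative caller (a sketch of how GCC's cpow folding has used
   this):

     do_mpc_arg2 (arg0, arg1, type, flag_unsafe_math_optimizations,
                  mpc_pow);

   enabling DO_NONFINITE only under unsafe math, since MPC and the C
   library may disagree about results involving Inf or NaN.  */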
11854
11855 /* A wrapper function for builtin folding that prevents warnings for
11856 "statement without effect" and the like, caused by removing the
11857 call node before the warning is generated. */
11858
11859 tree
11860 fold_call_stmt (gcall *stmt, bool ignore)
11861 {
11862 tree ret = NULL_TREE;
11863 tree fndecl = gimple_call_fndecl (stmt);
11864 location_t loc = gimple_location (stmt);
11865 if (fndecl
11866 && TREE_CODE (fndecl) == FUNCTION_DECL
11867 && DECL_BUILT_IN (fndecl)
11868 && !gimple_call_va_arg_pack_p (stmt))
11869 {
11870 int nargs = gimple_call_num_args (stmt);
11871 tree *args = (nargs > 0
11872 ? gimple_call_arg_ptr (stmt, 0)
11873 : &error_mark_node);
11874
11875 if (avoid_folding_inline_builtin (fndecl))
11876 return NULL_TREE;
11877 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11878 {
11879 return targetm.fold_builtin (fndecl, nargs, args, ignore);
11880 }
11881 else
11882 {
11883 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
11884 if (ret)
11885 {
11886 /* Propagate location information from original call to
11887 expansion of builtin. Otherwise things like
11888 maybe_emit_chk_warning, that operate on the expansion
11889 of a builtin, will use the wrong location information. */
11890 if (gimple_has_location (stmt))
11891 {
11892 tree realret = ret;
11893 if (TREE_CODE (ret) == NOP_EXPR)
11894 realret = TREE_OPERAND (ret, 0);
11895 if (CAN_HAVE_LOCATION_P (realret)
11896 && !EXPR_HAS_LOCATION (realret))
11897 SET_EXPR_LOCATION (realret, loc);
11898 return realret;
11899 }
11900 return ret;
11901 }
11902 }
11903 }
11904 return NULL_TREE;
11905 }
11906
11907 /* Look up the function in builtin_decl that corresponds to DECL
11908 and set ASMSPEC as its user assembler name. DECL must be a
11909 function decl that declares a builtin. */
11910
11911 void
11912 set_builtin_user_assembler_name (tree decl, const char *asmspec)
11913 {
11914 tree builtin;
11915 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
11916 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
11917 && asmspec != 0);
11918
11919 builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
11920 set_user_assembler_name (builtin, asmspec);
11921 switch (DECL_FUNCTION_CODE (decl))
11922 {
11923 case BUILT_IN_MEMCPY:
11924 init_block_move_fn (asmspec);
11925 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
11926 break;
11927 case BUILT_IN_MEMSET:
11928 init_block_clear_fn (asmspec);
11929 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
11930 break;
11931 case BUILT_IN_MEMMOVE:
11932 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
11933 break;
11934 case BUILT_IN_MEMCMP:
11935 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
11936 break;
11937 case BUILT_IN_ABORT:
11938 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
11939 break;
11940 case BUILT_IN_FFS:
11941 if (INT_TYPE_SIZE < BITS_PER_WORD)
11942 {
11943 set_user_assembler_libfunc ("ffs", asmspec);
11944 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
11945 MODE_INT, 0), "ffs");
11946 }
11947 break;
11948 default:
11949 break;
11950 }
11951 }
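
/* For illustration, a translation unit containing

     void *memcpy (void *, const void *, __SIZE_TYPE__)
       __asm__ ("__my_memcpy");

   reaches this function with ASMSPEC == "__my_memcpy", so block moves
   and memcpy libcalls emitted internally use that symbol instead.  */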
11952
11953 /* Return true if DECL is a builtin that expands to a constant or similarly
11954 simple code. */
11955 bool
11956 is_simple_builtin (tree decl)
11957 {
11958 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
11959 switch (DECL_FUNCTION_CODE (decl))
11960 {
11961 /* Builtins that expand to constants. */
11962 case BUILT_IN_CONSTANT_P:
11963 case BUILT_IN_EXPECT:
11964 case BUILT_IN_OBJECT_SIZE:
11965 case BUILT_IN_UNREACHABLE:
11966 /* Simple register moves or loads from stack. */
11967 case BUILT_IN_ASSUME_ALIGNED:
11968 case BUILT_IN_RETURN_ADDRESS:
11969 case BUILT_IN_EXTRACT_RETURN_ADDR:
11970 case BUILT_IN_FROB_RETURN_ADDR:
11971 case BUILT_IN_RETURN:
11972 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
11973 case BUILT_IN_FRAME_ADDRESS:
11974 case BUILT_IN_VA_END:
11975 case BUILT_IN_STACK_SAVE:
11976 case BUILT_IN_STACK_RESTORE:
11977 /* Exception state returns or moves registers around. */
11978 case BUILT_IN_EH_FILTER:
11979 case BUILT_IN_EH_POINTER:
11980 case BUILT_IN_EH_COPY_VALUES:
11981 return true;
11982
11983 default:
11984 return false;
11985 }
11986
11987 return false;
11988 }
11989
11990 /* Return true if DECL is a builtin that is not expensive, i.e., one that
11991 is most probably expanded inline into reasonably simple code. This is a
11992 superset of is_simple_builtin. */
11993 bool
11994 is_inexpensive_builtin (tree decl)
11995 {
11996 if (!decl)
11997 return false;
11998 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
11999 return true;
12000 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
12001 switch (DECL_FUNCTION_CODE (decl))
12002 {
12003 case BUILT_IN_ABS:
12004 case BUILT_IN_ALLOCA:
12005 case BUILT_IN_ALLOCA_WITH_ALIGN:
12006 case BUILT_IN_BSWAP16:
12007 case BUILT_IN_BSWAP32:
12008 case BUILT_IN_BSWAP64:
12009 case BUILT_IN_CLZ:
12010 case BUILT_IN_CLZIMAX:
12011 case BUILT_IN_CLZL:
12012 case BUILT_IN_CLZLL:
12013 case BUILT_IN_CTZ:
12014 case BUILT_IN_CTZIMAX:
12015 case BUILT_IN_CTZL:
12016 case BUILT_IN_CTZLL:
12017 case BUILT_IN_FFS:
12018 case BUILT_IN_FFSIMAX:
12019 case BUILT_IN_FFSL:
12020 case BUILT_IN_FFSLL:
12021 case BUILT_IN_IMAXABS:
12022 case BUILT_IN_FINITE:
12023 case BUILT_IN_FINITEF:
12024 case BUILT_IN_FINITEL:
12025 case BUILT_IN_FINITED32:
12026 case BUILT_IN_FINITED64:
12027 case BUILT_IN_FINITED128:
12028 case BUILT_IN_FPCLASSIFY:
12029 case BUILT_IN_ISFINITE:
12030 case BUILT_IN_ISINF_SIGN:
12031 case BUILT_IN_ISINF:
12032 case BUILT_IN_ISINFF:
12033 case BUILT_IN_ISINFL:
12034 case BUILT_IN_ISINFD32:
12035 case BUILT_IN_ISINFD64:
12036 case BUILT_IN_ISINFD128:
12037 case BUILT_IN_ISNAN:
12038 case BUILT_IN_ISNANF:
12039 case BUILT_IN_ISNANL:
12040 case BUILT_IN_ISNAND32:
12041 case BUILT_IN_ISNAND64:
12042 case BUILT_IN_ISNAND128:
12043 case BUILT_IN_ISNORMAL:
12044 case BUILT_IN_ISGREATER:
12045 case BUILT_IN_ISGREATEREQUAL:
12046 case BUILT_IN_ISLESS:
12047 case BUILT_IN_ISLESSEQUAL:
12048 case BUILT_IN_ISLESSGREATER:
12049 case BUILT_IN_ISUNORDERED:
12050 case BUILT_IN_VA_ARG_PACK:
12051 case BUILT_IN_VA_ARG_PACK_LEN:
12052 case BUILT_IN_VA_COPY:
12053 case BUILT_IN_TRAP:
12054 case BUILT_IN_SAVEREGS:
12055 case BUILT_IN_POPCOUNTL:
12056 case BUILT_IN_POPCOUNTLL:
12057 case BUILT_IN_POPCOUNTIMAX:
12058 case BUILT_IN_POPCOUNT:
12059 case BUILT_IN_PARITYL:
12060 case BUILT_IN_PARITYLL:
12061 case BUILT_IN_PARITYIMAX:
12062 case BUILT_IN_PARITY:
12063 case BUILT_IN_LABS:
12064 case BUILT_IN_LLABS:
12065 case BUILT_IN_PREFETCH:
12066 case BUILT_IN_ACC_ON_DEVICE:
12067 return true;
12068
12069 default:
12070 return is_simple_builtin (decl);
12071 }
12072
12073 return false;
12074 }