gcc/builtins.c
1 /* Expand builtin functions.
2 Copyright (C) 1988-2014 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "stringpool.h"
28 #include "stor-layout.h"
29 #include "calls.h"
30 #include "varasm.h"
31 #include "tree-object-size.h"
32 #include "realmpfr.h"
33 #include "basic-block.h"
34 #include "tree-ssa-alias.h"
35 #include "internal-fn.h"
36 #include "gimple-expr.h"
37 #include "is-a.h"
38 #include "gimple.h"
39 #include "flags.h"
40 #include "regs.h"
41 #include "hard-reg-set.h"
42 #include "except.h"
43 #include "function.h"
44 #include "insn-config.h"
45 #include "expr.h"
46 #include "optabs.h"
47 #include "libfuncs.h"
48 #include "recog.h"
49 #include "output.h"
50 #include "typeclass.h"
51 #include "predict.h"
52 #include "tm_p.h"
53 #include "target.h"
54 #include "langhooks.h"
55 #include "tree-ssanames.h"
56 #include "tree-dfa.h"
57 #include "value-prof.h"
58 #include "diagnostic-core.h"
59 #include "builtins.h"
60 #include "ubsan.h"
61 #include "cilk.h"
62
63
64 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
65
66 struct target_builtins default_target_builtins;
67 #if SWITCHABLE_TARGET
68 struct target_builtins *this_target_builtins = &default_target_builtins;
69 #endif
70
71 /* Define the names of the builtin function types and codes. */
72 const char *const built_in_class_names[BUILT_IN_LAST]
73 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
74
75 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
76 const char * built_in_names[(int) END_BUILTINS] =
77 {
78 #include "builtins.def"
79 };
80 #undef DEF_BUILTIN
81
82 /* Set up an array of _DECL trees; make sure each element is
83 initialized to NULL_TREE. */
84 builtin_info_type builtin_info;
85
86 /* Non-zero if __builtin_constant_p should be folded right away. */
87 bool force_folding_builtin_constant_p;
88
89 static const char *c_getstr (tree);
90 static rtx c_readstr (const char *, enum machine_mode);
91 static int target_char_cast (tree, char *);
92 static rtx get_memory_rtx (tree, tree);
93 static int apply_args_size (void);
94 static int apply_result_size (void);
95 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
96 static rtx result_vector (int, rtx);
97 #endif
98 static void expand_builtin_update_setjmp_buf (rtx);
99 static void expand_builtin_prefetch (tree);
100 static rtx expand_builtin_apply_args (void);
101 static rtx expand_builtin_apply_args_1 (void);
102 static rtx expand_builtin_apply (rtx, rtx, rtx);
103 static void expand_builtin_return (rtx);
104 static enum type_class type_to_class (tree);
105 static rtx expand_builtin_classify_type (tree);
106 static void expand_errno_check (tree, rtx);
107 static rtx expand_builtin_mathfn (tree, rtx, rtx);
108 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
109 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
110 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
111 static rtx expand_builtin_interclass_mathfn (tree, rtx);
112 static rtx expand_builtin_sincos (tree);
113 static rtx expand_builtin_cexpi (tree, rtx);
114 static rtx expand_builtin_int_roundingfn (tree, rtx);
115 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
116 static rtx expand_builtin_next_arg (void);
117 static rtx expand_builtin_va_start (tree);
118 static rtx expand_builtin_va_end (tree);
119 static rtx expand_builtin_va_copy (tree);
120 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
121 static rtx expand_builtin_strcmp (tree, rtx);
122 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
123 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
124 static rtx expand_builtin_memcpy (tree, rtx);
125 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
126 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
127 enum machine_mode, int);
128 static rtx expand_builtin_strcpy (tree, rtx);
129 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
130 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
131 static rtx expand_builtin_strncpy (tree, rtx);
132 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
133 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
134 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
135 static rtx expand_builtin_bzero (tree);
136 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
137 static rtx expand_builtin_alloca (tree, bool);
138 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
139 static rtx expand_builtin_frame_address (tree, tree);
140 static tree stabilize_va_list_loc (location_t, tree, int);
141 static rtx expand_builtin_expect (tree, rtx);
142 static tree fold_builtin_constant_p (tree);
143 static tree fold_builtin_classify_type (tree);
144 static tree fold_builtin_strlen (location_t, tree, tree);
145 static tree fold_builtin_inf (location_t, tree, int);
146 static tree fold_builtin_nan (tree, tree, int);
147 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
148 static bool validate_arg (const_tree, enum tree_code code);
149 static bool integer_valued_real_p (tree);
150 static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
151 static bool readonly_data_expr (tree);
152 static rtx expand_builtin_fabs (tree, rtx, rtx);
153 static rtx expand_builtin_signbit (tree, rtx);
154 static tree fold_builtin_sqrt (location_t, tree, tree);
155 static tree fold_builtin_cbrt (location_t, tree, tree);
156 static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
157 static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
158 static tree fold_builtin_cos (location_t, tree, tree, tree);
159 static tree fold_builtin_cosh (location_t, tree, tree, tree);
160 static tree fold_builtin_tan (tree, tree);
161 static tree fold_builtin_trunc (location_t, tree, tree);
162 static tree fold_builtin_floor (location_t, tree, tree);
163 static tree fold_builtin_ceil (location_t, tree, tree);
164 static tree fold_builtin_round (location_t, tree, tree);
165 static tree fold_builtin_int_roundingfn (location_t, tree, tree);
166 static tree fold_builtin_bitop (tree, tree);
167 static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree, bool, int);
168 static tree fold_builtin_strchr (location_t, tree, tree, tree);
169 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
170 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
171 static tree fold_builtin_strcmp (location_t, tree, tree);
172 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
173 static tree fold_builtin_signbit (location_t, tree, tree);
174 static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
175 static tree fold_builtin_isascii (location_t, tree);
176 static tree fold_builtin_toascii (location_t, tree);
177 static tree fold_builtin_isdigit (location_t, tree);
178 static tree fold_builtin_fabs (location_t, tree, tree);
179 static tree fold_builtin_abs (location_t, tree, tree);
180 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
181 enum tree_code);
182 static tree fold_builtin_n (location_t, tree, tree *, int, bool);
183 static tree fold_builtin_0 (location_t, tree, bool);
184 static tree fold_builtin_1 (location_t, tree, tree, bool);
185 static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
186 static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
187 static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
188 static tree fold_builtin_varargs (location_t, tree, tree, bool);
189
190 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
191 static tree fold_builtin_strstr (location_t, tree, tree, tree);
192 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
193 static tree fold_builtin_strncat (location_t, tree, tree, tree);
194 static tree fold_builtin_strspn (location_t, tree, tree);
195 static tree fold_builtin_strcspn (location_t, tree, tree);
196 static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
197 static tree fold_builtin_snprintf (location_t, tree, tree, tree, tree, int);
198
199 static rtx expand_builtin_object_size (tree);
200 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
201 enum built_in_function);
202 static void maybe_emit_chk_warning (tree, enum built_in_function);
203 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
204 static void maybe_emit_free_warning (tree);
205 static tree fold_builtin_object_size (tree, tree);
206 static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
207 static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
208 static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
209 static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
210 static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
211 enum built_in_function);
212 static bool init_target_chars (void);
213
214 static unsigned HOST_WIDE_INT target_newline;
215 static unsigned HOST_WIDE_INT target_percent;
216 static unsigned HOST_WIDE_INT target_c;
217 static unsigned HOST_WIDE_INT target_s;
218 static char target_percent_c[3];
219 static char target_percent_s[3];
220 static char target_percent_s_newline[4];
221 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
222 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
223 static tree do_mpfr_arg2 (tree, tree, tree,
224 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
225 static tree do_mpfr_arg3 (tree, tree, tree, tree,
226 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
227 static tree do_mpfr_sincos (tree, tree, tree);
228 static tree do_mpfr_bessel_n (tree, tree, tree,
229 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
230 const REAL_VALUE_TYPE *, bool);
231 static tree do_mpfr_remquo (tree, tree, tree);
232 static tree do_mpfr_lgamma_r (tree, tree, tree);
233 static void expand_builtin_sync_synchronize (void);
234
235 /* Return true if NAME starts with __builtin_ or __sync_. */
236
237 static bool
238 is_builtin_name (const char *name)
239 {
240 if (strncmp (name, "__builtin_", 10) == 0)
241 return true;
242 if (strncmp (name, "__sync_", 7) == 0)
243 return true;
244 if (strncmp (name, "__atomic_", 9) == 0)
245 return true;
246 if (flag_cilkplus
247 && (!strcmp (name, "__cilkrts_detach")
248 || !strcmp (name, "__cilkrts_pop_frame")))
249 return true;
250 return false;
251 }
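/* Illustrative example (not part of the sources): the checks above are
   plain prefix matches, so the following calls would behave as

     is_builtin_name ("__builtin_memcpy")      -> true
     is_builtin_name ("__sync_fetch_and_add")  -> true
     is_builtin_name ("__atomic_load_n")       -> true
     is_builtin_name ("memcpy")                -> false

   The prefix lengths passed to strncmp (10, 7, 9) must match the
   literal lengths exactly, not counting the terminating NUL. */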
252
253
254 /* Return true if DECL is a function symbol representing a built-in. */
255
256 bool
257 is_builtin_fn (tree decl)
258 {
259 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
260 }
261
262 /* Return true if NODE should be considered for inline expansion regardless
263 of the optimization level. This means whenever a function is invoked with
264 its "internal" name, which normally contains the prefix "__builtin". */
265
266 static bool
267 called_as_built_in (tree node)
268 {
269 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
270 we want the name used to call the function, not the name it
271 will have. */
272 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
273 return is_builtin_name (name);
274 }
275
276 /* Compute values M and N such that M divides (address of EXP - N) and such
277 that N < M. If these numbers can be determined, store M in *ALIGNP and N in
278 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT in
279 *ALIGNP and any bit-offset in *BITPOSP.
280
281 Note that the address (and thus the alignment) computed here is based
282 on the address to which a symbol resolves, whereas DECL_ALIGN is based
283 on the address at which an object is actually located. These two
284 addresses are not always the same. For example, on ARM targets,
285 the address &foo of a Thumb function foo() has the lowest bit set,
286 whereas foo() itself starts on an even address.
287
288 If ADDR_P is true we are taking the address of the memory reference EXP
289 and thus cannot rely on the access taking place. */
290
291 static bool
292 get_object_alignment_2 (tree exp, unsigned int *alignp,
293 unsigned HOST_WIDE_INT *bitposp, bool addr_p)
294 {
295 HOST_WIDE_INT bitsize, bitpos;
296 tree offset;
297 enum machine_mode mode;
298 int unsignedp, volatilep;
299 unsigned int align = BITS_PER_UNIT;
300 bool known_alignment = false;
301
302 /* Get the innermost object and the constant (bitpos) and possibly
303 variable (offset) offset of the access. */
304 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
305 &mode, &unsignedp, &volatilep, true);
306
307 /* Extract alignment information from the innermost object and
308 possibly adjust bitpos and offset. */
309 if (TREE_CODE (exp) == FUNCTION_DECL)
310 {
311 /* Function addresses can encode extra information besides their
312 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
313 allows the low bit to be used as a virtual bit, we know
314 that the address itself must be at least 2-byte aligned. */
315 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
316 align = 2 * BITS_PER_UNIT;
317 }
318 else if (TREE_CODE (exp) == LABEL_DECL)
319 ;
320 else if (TREE_CODE (exp) == CONST_DECL)
321 {
322 /* The alignment of a CONST_DECL is determined by its initializer. */
323 exp = DECL_INITIAL (exp);
324 align = TYPE_ALIGN (TREE_TYPE (exp));
325 #ifdef CONSTANT_ALIGNMENT
326 if (CONSTANT_CLASS_P (exp))
327 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
328 #endif
329 known_alignment = true;
330 }
331 else if (DECL_P (exp))
332 {
333 align = DECL_ALIGN (exp);
334 known_alignment = true;
335 }
336 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
337 {
338 align = TYPE_ALIGN (TREE_TYPE (exp));
339 }
340 else if (TREE_CODE (exp) == INDIRECT_REF
341 || TREE_CODE (exp) == MEM_REF
342 || TREE_CODE (exp) == TARGET_MEM_REF)
343 {
344 tree addr = TREE_OPERAND (exp, 0);
345 unsigned ptr_align;
346 unsigned HOST_WIDE_INT ptr_bitpos;
347
348 if (TREE_CODE (addr) == BIT_AND_EXPR
349 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
350 {
351 align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
352 & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
353 align *= BITS_PER_UNIT;
354 addr = TREE_OPERAND (addr, 0);
355 }
356
357 known_alignment
358 = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
359 align = MAX (ptr_align, align);
360
361 /* The alignment of the pointer operand in a TARGET_MEM_REF
362 has to take the variable offset parts into account. */
363 if (TREE_CODE (exp) == TARGET_MEM_REF)
364 {
365 if (TMR_INDEX (exp))
366 {
367 unsigned HOST_WIDE_INT step = 1;
368 if (TMR_STEP (exp))
369 step = TREE_INT_CST_LOW (TMR_STEP (exp));
370 align = MIN (align, (step & -step) * BITS_PER_UNIT);
371 }
372 if (TMR_INDEX2 (exp))
373 align = BITS_PER_UNIT;
374 known_alignment = false;
375 }
376
377 /* When EXP is an actual memory reference then we can use
378 TYPE_ALIGN of a pointer indirection to derive alignment.
379 Do so only if get_pointer_alignment_1 did not reveal absolute
380 alignment knowledge and if using that alignment would
381 improve the situation. */
382 if (!addr_p && !known_alignment
383 && TYPE_ALIGN (TREE_TYPE (exp)) > align)
384 align = TYPE_ALIGN (TREE_TYPE (exp));
385 else
386 {
387 /* Else adjust bitpos accordingly. */
388 bitpos += ptr_bitpos;
389 if (TREE_CODE (exp) == MEM_REF
390 || TREE_CODE (exp) == TARGET_MEM_REF)
391 bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
392 }
393 }
394 else if (TREE_CODE (exp) == STRING_CST)
395 {
396 /* STRING_CSTs are the only constant objects we allow not to be
397 wrapped inside a CONST_DECL. */
398 align = TYPE_ALIGN (TREE_TYPE (exp));
399 #ifdef CONSTANT_ALIGNMENT
400 if (CONSTANT_CLASS_P (exp))
401 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
402 #endif
403 known_alignment = true;
404 }
405
406 /* If there is a non-constant offset part extract the maximum
407 alignment that can prevail. */
408 if (offset)
409 {
410 unsigned int trailing_zeros = tree_ctz (offset);
411 if (trailing_zeros < HOST_BITS_PER_INT)
412 {
413 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
414 if (inner)
415 align = MIN (align, inner);
416 }
417 }
418
419 *alignp = align;
420 *bitposp = bitpos & (*alignp - 1);
421 return known_alignment;
422 }
423
424 /* For a memory reference expression EXP compute values M and N such that M
425 divides (&EXP - N) and such that N < M. If these numbers can be determined,
426 store M in *ALIGNP and N in *BITPOSP and return true. Otherwise return false
427 and store BITS_PER_UNIT in *ALIGNP and any bit-offset in *BITPOSP. */
428
429 bool
430 get_object_alignment_1 (tree exp, unsigned int *alignp,
431 unsigned HOST_WIDE_INT *bitposp)
432 {
433 return get_object_alignment_2 (exp, alignp, bitposp, false);
434 }
435
436 /* Return the alignment in bits of EXP, an object. */
437
438 unsigned int
439 get_object_alignment (tree exp)
440 {
441 unsigned HOST_WIDE_INT bitpos = 0;
442 unsigned int align;
443
444 get_object_alignment_1 (exp, &align, &bitpos);
445
446 /* align and bitpos now specify known low bits of the pointer.
447 ptr & (align - 1) == bitpos. */
448
449 if (bitpos != 0)
450 align = (bitpos & -bitpos);
451 return align;
452 }
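/* Worked example (illustrative): if get_object_alignment_1 reports
   align == 16 and bitpos == 8, the address satisfies ptr % 16 == 8,
   i.e. ptr = 16*k + 8. The largest power of two dividing every such
   address is bitpos & -bitpos == 8, which is what is returned above;
   with bitpos == 0 the full alignment of 16 is kept. */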
453
454 /* For a pointer valued expression EXP compute values M and N such that M
455 divides (EXP - N) and such that N < M. If these numbers can be determined,
456 store M in *ALIGNP and N in *BITPOSP and return true. Return false if
457 the results are just a conservative approximation.
458
459 If EXP is not a pointer, false is returned too. */
460
461 bool
462 get_pointer_alignment_1 (tree exp, unsigned int *alignp,
463 unsigned HOST_WIDE_INT *bitposp)
464 {
465 STRIP_NOPS (exp);
466
467 if (TREE_CODE (exp) == ADDR_EXPR)
468 return get_object_alignment_2 (TREE_OPERAND (exp, 0),
469 alignp, bitposp, true);
470 else if (TREE_CODE (exp) == SSA_NAME
471 && POINTER_TYPE_P (TREE_TYPE (exp)))
472 {
473 unsigned int ptr_align, ptr_misalign;
474 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
475
476 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
477 {
478 *bitposp = ptr_misalign * BITS_PER_UNIT;
479 *alignp = ptr_align * BITS_PER_UNIT;
480 /* We cannot really tell whether this result is an approximation. */
481 return true;
482 }
483 else
484 {
485 *bitposp = 0;
486 *alignp = BITS_PER_UNIT;
487 return false;
488 }
489 }
490 else if (TREE_CODE (exp) == INTEGER_CST)
491 {
492 *alignp = BIGGEST_ALIGNMENT;
493 *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
494 & (BIGGEST_ALIGNMENT - 1));
495 return true;
496 }
497
498 *bitposp = 0;
499 *alignp = BITS_PER_UNIT;
500 return false;
501 }
502
503 /* Return the alignment in bits of EXP, a pointer valued expression.
504 The alignment returned is, by default, the alignment of the thing that
505 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
506
507 Otherwise, look at the expression to see if we can do better, i.e., if the
508 expression is actually pointing at an object whose alignment is tighter. */
509
510 unsigned int
511 get_pointer_alignment (tree exp)
512 {
513 unsigned HOST_WIDE_INT bitpos = 0;
514 unsigned int align;
515
516 get_pointer_alignment_1 (exp, &align, &bitpos);
517
518 /* align and bitpos now specify known low bits of the pointer.
519 ptr & (align - 1) == bitpos. */
520
521 if (bitpos != 0)
522 align = (bitpos & -bitpos);
523
524 return align;
525 }
526
527 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
528 way, because the string could contain a zero byte in the middle;
529 TREE_STRING_LENGTH is the size of the character array, not the string.
530
531 ONLY_VALUE should be nonzero if the result is not going to be emitted
532 into the instruction stream and zero if it is going to be expanded.
533 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
534 is returned, otherwise NULL, since
535 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
536 evaluate the side-effects.
537
538 If ONLY_VALUE is two then we do not emit warnings about out-of-bound
539 accesses. Note that this implies the result is not going to be emitted
540 into the instruction stream.
541
542 The value returned is of type `ssizetype'.
543
544 Unfortunately, string_constant can't access the values of const char
545 arrays with initializers, so neither can we do so here. */
546
547 tree
548 c_strlen (tree src, int only_value)
549 {
550 tree offset_node;
551 HOST_WIDE_INT offset;
552 int max;
553 const char *ptr;
554 location_t loc;
555
556 STRIP_NOPS (src);
557 if (TREE_CODE (src) == COND_EXPR
558 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
559 {
560 tree len1, len2;
561
562 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
563 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
564 if (tree_int_cst_equal (len1, len2))
565 return len1;
566 }
567
568 if (TREE_CODE (src) == COMPOUND_EXPR
569 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
570 return c_strlen (TREE_OPERAND (src, 1), only_value);
571
572 loc = EXPR_LOC_OR_LOC (src, input_location);
573
574 src = string_constant (src, &offset_node);
575 if (src == 0)
576 return NULL_TREE;
577
578 max = TREE_STRING_LENGTH (src) - 1;
579 ptr = TREE_STRING_POINTER (src);
580
581 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
582 {
583 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
584 compute the offset to the following null if we don't know where to
585 start searching for it. */
586 int i;
587
588 for (i = 0; i < max; i++)
589 if (ptr[i] == 0)
590 return NULL_TREE;
591
592 /* We don't know the starting offset, but we do know that the string
593 has no internal zero bytes. We can assume that the offset falls
594 within the bounds of the string; otherwise, the programmer deserves
595 what he gets. Subtract the offset from the length of the string,
596 and return that. This would perhaps not be valid if we were dealing
597 with named arrays in addition to literal string constants. */
598
599 return size_diffop_loc (loc, size_int (max), offset_node);
600 }
601
602 /* We have a known offset into the string. Start searching there for
603 a null character if we can represent it as a single HOST_WIDE_INT. */
604 if (offset_node == 0)
605 offset = 0;
606 else if (! tree_fits_shwi_p (offset_node))
607 offset = -1;
608 else
609 offset = tree_to_shwi (offset_node);
610
611 /* If the offset is known to be out of bounds, warn, and call strlen at
612 runtime. */
613 if (only_value != 2
614 && (offset < 0 || offset > max))
615 {
616 /* Suppress multiple warnings for propagated constant strings. */
617 if (! TREE_NO_WARNING (src))
618 {
619 warning_at (loc, 0, "offset outside bounds of constant string");
620 TREE_NO_WARNING (src) = 1;
621 }
622 return NULL_TREE;
623 }
624
625 /* Use strlen to search for the first zero byte. Since any strings
626 constructed with build_string will have nulls appended, we win even
627 if we get handed something like (char[4])"abcd".
628
629 Since OFFSET is our starting index into the string, no further
630 calculation is needed. */
631 return ssize_int (strlen (ptr + offset));
632 }
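/* Example of the distinction documented above (illustrative only):
   given

     const char s[] = "foo\0bar";

   TREE_STRING_LENGTH corresponds to sizeof (s) == 8 (the whole array,
   including the embedded and trailing NULs), whereas the string length
   is strlen (s) == 3. This is why c_strlen must scan for the first
   zero byte instead of trusting the array size. */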
633
634 /* Return a char pointer for a C string if it is a string constant
635 or sum of string constant and integer constant. */
636
637 static const char *
638 c_getstr (tree src)
639 {
640 tree offset_node;
641
642 src = string_constant (src, &offset_node);
643 if (src == 0)
644 return 0;
645
646 if (offset_node == 0)
647 return TREE_STRING_POINTER (src);
648 else if (!tree_fits_uhwi_p (offset_node)
649 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
650 return 0;
651
652 return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
653 }
654
655 /* Return a constant integer corresponding to target reading
656 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
657
658 static rtx
659 c_readstr (const char *str, enum machine_mode mode)
660 {
661 HOST_WIDE_INT ch;
662 unsigned int i, j;
663 HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];
664
665 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
666 unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
667 / HOST_BITS_PER_WIDE_INT;
668
669 gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
670 for (i = 0; i < len; i++)
671 tmp[i] = 0;
672
673 ch = 1;
674 for (i = 0; i < GET_MODE_SIZE (mode); i++)
675 {
676 j = i;
677 if (WORDS_BIG_ENDIAN)
678 j = GET_MODE_SIZE (mode) - i - 1;
679 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
680 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
681 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
682 j *= BITS_PER_UNIT;
683
684 if (ch)
685 ch = (unsigned char) str[i];
686 tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
687 }
688
689 wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
690 return immed_wide_int_const (c, mode);
691 }
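/* Byte-order sketch (illustrative, assuming 8-bit units and a 4-byte
   SImode): reading "abcd" places str[0] in the lowest-order byte on a
   little-endian target and in the highest-order byte on a big-endian
   one, so the constant built above is

     0x64636261   (little endian)
     0x61626364   (big endian)

   i.e. storing the constant back to memory in target byte order
   reproduces the original string. */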
692
693 /* Cast a target constant CST to target CHAR and if that value fits into
694 host char type, return zero and put that value into variable pointed to by
695 P. */
696
697 static int
698 target_char_cast (tree cst, char *p)
699 {
700 unsigned HOST_WIDE_INT val, hostval;
701
702 if (TREE_CODE (cst) != INTEGER_CST
703 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
704 return 1;
705
706 /* We do not care here whether it fits or not. */
707 val = TREE_INT_CST_LOW (cst);
708
709 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
710 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
711
712 hostval = val;
713 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
714 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
715
716 if (val != hostval)
717 return 1;
718
719 *p = hostval;
720 return 0;
721 }
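/* Worked example (illustrative): with CHAR_TYPE_SIZE == 8 and an
   8-bit host char, the constant 0x141 is first reduced to the target
   char range (0x141 & 0xff == 0x41); the host truncation leaves that
   unchanged, so 'A' is stored through P and 0 (success) is returned.
   The final mismatch check (return 1) can only fail when the target
   char is wider than the host char. */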
722
723 /* Similar to save_expr, but assumes that arbitrary code is not executed
724 in between the multiple evaluations. In particular, we assume that a
725 non-addressable local variable will not be modified. */
726
727 static tree
728 builtin_save_expr (tree exp)
729 {
730 if (TREE_CODE (exp) == SSA_NAME
731 || (TREE_ADDRESSABLE (exp) == 0
732 && (TREE_CODE (exp) == PARM_DECL
733 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
734 return exp;
735
736 return save_expr (exp);
737 }
738
739 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
740 times to get the address of either a higher stack frame, or a return
741 address located within it (depending on FNDECL_CODE). */
742
743 static rtx
744 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
745 {
746 int i;
747
748 #ifdef INITIAL_FRAME_ADDRESS_RTX
749 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
750 #else
751 rtx tem;
752
753 /* For a zero count with __builtin_return_address, we don't care what
754 frame address we return, because target-specific definitions will
755 override us. Therefore frame pointer elimination is OK, and using
756 the soft frame pointer is OK.
757
758 For a nonzero count, or a zero count with __builtin_frame_address,
759 we require a stable offset from the current frame pointer to the
760 previous one, so we must use the hard frame pointer, and
761 we must disable frame pointer elimination. */
762 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
763 tem = frame_pointer_rtx;
764 else
765 {
766 tem = hard_frame_pointer_rtx;
767
768 /* Tell reload not to eliminate the frame pointer. */
769 crtl->accesses_prior_frames = 1;
770 }
771 #endif
772
773 /* Some machines need special handling before we can access
774 arbitrary frames. For example, on the SPARC, we must first flush
775 all register windows to the stack. */
776 #ifdef SETUP_FRAME_ADDRESSES
777 if (count > 0)
778 SETUP_FRAME_ADDRESSES ();
779 #endif
780
781 /* On the SPARC, the return address is not in the frame, it is in a
782 register. There is no way to access it off of the current frame
783 pointer, but it can be accessed off the previous frame pointer by
784 reading the value from the register window save area. */
785 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
786 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
787 count--;
788 #endif
789
790 /* Scan back COUNT frames to the specified frame. */
791 for (i = 0; i < count; i++)
792 {
793 /* Assume the dynamic chain pointer is in the word that the
794 frame address points to, unless otherwise specified. */
795 #ifdef DYNAMIC_CHAIN_ADDRESS
796 tem = DYNAMIC_CHAIN_ADDRESS (tem);
797 #endif
798 tem = memory_address (Pmode, tem);
799 tem = gen_frame_mem (Pmode, tem);
800 tem = copy_to_reg (tem);
801 }
802
803 /* For __builtin_frame_address, return what we've got. But, on
804 the SPARC for example, we may have to add a bias. */
805 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
806 #ifdef FRAME_ADDR_RTX
807 return FRAME_ADDR_RTX (tem);
808 #else
809 return tem;
810 #endif
811
812 /* For __builtin_return_address, get the return address from that frame. */
813 #ifdef RETURN_ADDR_RTX
814 tem = RETURN_ADDR_RTX (count, tem);
815 #else
816 tem = memory_address (Pmode,
817 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
818 tem = gen_frame_mem (Pmode, tem);
819 #endif
820 return tem;
821 }
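/* User-level view (illustrative): this expander backs the two
   documented builtins

     void *ra = __builtin_return_address (0);
     void *fa = __builtin_frame_address (1);

   where the argument must be a constant frame count. Nonzero counts
   walk the dynamic chain above and are only reliable when the
   intervening frames keep a frame pointer. */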
822
823 /* Alias set used for setjmp buffer. */
824 static alias_set_type setjmp_alias_set = -1;
825
826 /* Construct the leading half of a __builtin_setjmp call. Control will
827 return to RECEIVER_LABEL. This is also called directly by the SJLJ
828 exception handling code. */
829
830 void
831 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
832 {
833 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
834 rtx stack_save;
835 rtx mem;
836
837 if (setjmp_alias_set == -1)
838 setjmp_alias_set = new_alias_set ();
839
840 buf_addr = convert_memory_address (Pmode, buf_addr);
841
842 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
843
844 /* We store the frame pointer and the address of receiver_label in
845 the buffer and use the rest of it for the stack save area, which
846 is machine-dependent. */
847
848 mem = gen_rtx_MEM (Pmode, buf_addr);
849 set_mem_alias_set (mem, setjmp_alias_set);
850 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
851
852 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
853 GET_MODE_SIZE (Pmode))),
854 set_mem_alias_set (mem, setjmp_alias_set);
855
856 emit_move_insn (validize_mem (mem),
857 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
858
859 stack_save = gen_rtx_MEM (sa_mode,
860 plus_constant (Pmode, buf_addr,
861 2 * GET_MODE_SIZE (Pmode)));
862 set_mem_alias_set (stack_save, setjmp_alias_set);
863 emit_stack_save (SAVE_NONLOCAL, &stack_save);
864
865 /* If there is further processing to do, do it. */
866 #ifdef HAVE_builtin_setjmp_setup
867 if (HAVE_builtin_setjmp_setup)
868 emit_insn (gen_builtin_setjmp_setup (buf_addr));
869 #endif
870
871 /* We have a nonlocal label. */
872 cfun->has_nonlocal_label = 1;
873 }
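/* Buffer layout sketch as written above (illustrative; one word is
   GET_MODE_SIZE (Pmode) bytes):

     word 0    frame value (targetm.builtin_setjmp_frame_value)
     word 1    address of RECEIVER_LABEL
     word 2..  stack save area in sa_mode (SAVE_NONLOCAL)

   The remaining words are reserved for the machine-dependent stack
   save area, which is why callers must pass a buffer of five words
   (see expand_builtin_longjmp below). */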
874
875 /* Construct the trailing part of a __builtin_setjmp call. This is
876 also called directly by the SJLJ exception handling code.
877 If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler. */
878
879 void
880 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
881 {
882 rtx chain;
883
884 /* Mark the FP as used when we get here, so we have to make sure it's
885 marked as used by this function. */
886 emit_use (hard_frame_pointer_rtx);
887
888 /* Mark the static chain as clobbered here so life information
889 doesn't get messed up for it. */
890 chain = targetm.calls.static_chain (current_function_decl, true);
891 if (chain && REG_P (chain))
892 emit_clobber (chain);
893
894 /* Now put in the code to restore the frame pointer, and argument
895 pointer, if needed. */
896 #ifdef HAVE_nonlocal_goto
897 if (! HAVE_nonlocal_goto)
898 #endif
899 {
900 /* First adjust our frame pointer to its actual value. It was
901 previously set to the start of the virtual area corresponding to
902 the stacked variables when we branched here and now needs to be
903 adjusted to the actual hardware fp value.
904
905 Assignments to virtual registers are converted by
906 instantiate_virtual_regs into the corresponding assignment
907 to the underlying register (fp in this case) that makes
908 the original assignment true.
909 So the following insn will actually be decrementing fp by
910 STARTING_FRAME_OFFSET. */
911 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
912
913 /* Restoring the frame pointer also modifies the hard frame pointer.
914 Mark it used (so that the previous assignment remains live once
915 the frame pointer is eliminated) and clobbered (to represent the
916 implicit update from the assignment). */
917 emit_use (hard_frame_pointer_rtx);
918 emit_clobber (hard_frame_pointer_rtx);
919 }
920
921 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
922 if (fixed_regs[ARG_POINTER_REGNUM])
923 {
924 #ifdef ELIMINABLE_REGS
925 /* If the argument pointer can be eliminated in favor of the
926 frame pointer, we don't need to restore it. We assume here
927 that if such an elimination is present, it can always be used.
928 This is the case on all known machines; if we don't make this
929 assumption, we do unnecessary saving on many machines. */
930 size_t i;
931 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
932
933 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
934 if (elim_regs[i].from == ARG_POINTER_REGNUM
935 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
936 break;
937
938 if (i == ARRAY_SIZE (elim_regs))
939 #endif
940 {
941 /* Now restore our arg pointer from the address at which it
942 was saved in our stack frame. */
943 emit_move_insn (crtl->args.internal_arg_pointer,
944 copy_to_reg (get_arg_pointer_save_area ()));
945 }
946 }
947 #endif
948
949 #ifdef HAVE_builtin_setjmp_receiver
950 if (receiver_label != NULL && HAVE_builtin_setjmp_receiver)
951 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
952 else
953 #endif
954 #ifdef HAVE_nonlocal_goto_receiver
955 if (HAVE_nonlocal_goto_receiver)
956 emit_insn (gen_nonlocal_goto_receiver ());
957 else
958 #endif
959 { /* Nothing */ }
960
961 /* We must not allow the code we just generated to be reordered by
962 scheduling. Specifically, the update of the frame pointer must
963 happen immediately, not later. */
964 emit_insn (gen_blockage ());
965 }
966
967 /* __builtin_longjmp is passed a pointer to an array of five words (not
968 all will be used on all machines). It operates similarly to the C
969 library function of the same name, but is more efficient. Much of
970 the code below is copied from the handling of non-local gotos. */
971
972 static void
973 expand_builtin_longjmp (rtx buf_addr, rtx value)
974 {
975 rtx fp, lab, stack, insn, last;
976 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
977
978 /* DRAP is needed for stack realign if longjmp is expanded to current
979 function */
980 if (SUPPORTS_STACK_ALIGNMENT)
981 crtl->need_drap = true;
982
983 if (setjmp_alias_set == -1)
984 setjmp_alias_set = new_alias_set ();
985
986 buf_addr = convert_memory_address (Pmode, buf_addr);
987
988 buf_addr = force_reg (Pmode, buf_addr);
989
990 /* We require that the user pass a second argument of 1, because
991 that is what builtin_setjmp will return. */
992 gcc_assert (value == const1_rtx);
993
994 last = get_last_insn ();
995 #ifdef HAVE_builtin_longjmp
996 if (HAVE_builtin_longjmp)
997 emit_insn (gen_builtin_longjmp (buf_addr));
998 else
999 #endif
1000 {
1001 fp = gen_rtx_MEM (Pmode, buf_addr);
1002 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
1003 GET_MODE_SIZE (Pmode)));
1004
1005 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
1006 2 * GET_MODE_SIZE (Pmode)));
1007 set_mem_alias_set (fp, setjmp_alias_set);
1008 set_mem_alias_set (lab, setjmp_alias_set);
1009 set_mem_alias_set (stack, setjmp_alias_set);
1010
1011 /* Pick up FP, label, and SP from the block and jump. This code is
1012 from expand_goto in stmt.c; see there for detailed comments. */
1013 #ifdef HAVE_nonlocal_goto
1014 if (HAVE_nonlocal_goto)
1015 /* We have to pass a value to the nonlocal_goto pattern that will
1016 get copied into the static_chain pointer, but it does not matter
1017 what that value is, because builtin_setjmp does not use it. */
1018 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
1019 else
1020 #endif
1021 {
1022 lab = copy_to_reg (lab);
1023
1024 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1025 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1026
1027 emit_move_insn (hard_frame_pointer_rtx, fp);
1028 emit_stack_restore (SAVE_NONLOCAL, stack);
1029
1030 emit_use (hard_frame_pointer_rtx);
1031 emit_use (stack_pointer_rtx);
1032 emit_indirect_jump (lab);
1033 }
1034 }
1035
1036 /* Search backwards and mark the jump insn as a non-local goto.
1037 Note that this precludes the use of __builtin_longjmp to a
1038 __builtin_setjmp target in the same function. However, we've
1039 already cautioned the user that these functions are for
1040 internal exception handling use only. */
1041 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1042 {
1043 gcc_assert (insn != last);
1044
1045 if (JUMP_P (insn))
1046 {
1047 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1048 break;
1049 }
1050 else if (CALL_P (insn))
1051 break;
1052 }
1053 }
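/* Usage sketch (illustrative): although intended for internal
   exception handling, the expected pairing of these builtins is

     intptr_t buf[5];
     if (__builtin_setjmp (buf) == 0)
       do_work ();                   (normal path)
     else
       handle_unwind ();             (reached via longjmp)
     ...
     __builtin_longjmp (buf, 1);     (second argument must be 1)

   do_work and handle_unwind are hypothetical placeholders; the
   constant 1 is the requirement enforced by the gcc_assert above. */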
1054
1055 static inline bool
1056 more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1057 {
1058 return (iter->i < iter->n);
1059 }
1060
1061 /* This function validates the types of a function call argument list
1062 against a specified list of tree_codes. If the last specifier is a 0,
1063 that represents an ellipsis; otherwise the last specifier must be a
1064 VOID_TYPE. */
1065
1066 static bool
1067 validate_arglist (const_tree callexpr, ...)
1068 {
1069 enum tree_code code;
1070 bool res = false;
1071 va_list ap;
1072 const_call_expr_arg_iterator iter;
1073 const_tree arg;
1074
1075 va_start (ap, callexpr);
1076 init_const_call_expr_arg_iterator (callexpr, &iter);
1077
1078 do
1079 {
1080 code = (enum tree_code) va_arg (ap, int);
1081 switch (code)
1082 {
1083 case 0:
1084 /* This signifies an ellipsis; any further arguments are all ok. */
1085 res = true;
1086 goto end;
1087 case VOID_TYPE:
1088 /* This signifies an endlink, if no arguments remain, return
1089 true, otherwise return false. */
1090 res = !more_const_call_expr_args_p (&iter);
1091 goto end;
1092 default:
1093 /* If no parameters remain or the parameter's code does not
1094 match the specified code, return false. Otherwise continue
1095 checking any remaining arguments. */
1096 arg = next_const_call_expr_arg (&iter);
1097 if (!validate_arg (arg, code))
1098 goto end;
1099 break;
1100 }
1101 }
1102 while (1);
1103
1104 /* We need gotos here since we can only have one VA_CLOSE in a
1105 function. */
1106 end: ;
1107 va_end (ap);
1108
1109 return res;
1110 }
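/* Usage sketch (illustrative; both patterns appear in the expanders
   below): terminate the list with VOID_TYPE to demand an exact
   argument count, or with 0 to accept trailing varargs:

     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
       (exactly two pointer arguments)
     validate_arglist (exp, POINTER_TYPE, 0)
       (one pointer argument, then anything) */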
1111
1112 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1113 and the address of the save area. */
1114
1115 static rtx
1116 expand_builtin_nonlocal_goto (tree exp)
1117 {
1118 tree t_label, t_save_area;
1119 rtx r_label, r_save_area, r_fp, r_sp, insn;
1120
1121 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1122 return NULL_RTX;
1123
1124 t_label = CALL_EXPR_ARG (exp, 0);
1125 t_save_area = CALL_EXPR_ARG (exp, 1);
1126
1127 r_label = expand_normal (t_label);
1128 r_label = convert_memory_address (Pmode, r_label);
1129 r_save_area = expand_normal (t_save_area);
1130 r_save_area = convert_memory_address (Pmode, r_save_area);
1131 /* Copy the address of the save location to a register just in case it was
1132 based on the frame pointer. */
1133 r_save_area = copy_to_reg (r_save_area);
1134 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1135 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1136 plus_constant (Pmode, r_save_area,
1137 GET_MODE_SIZE (Pmode)));
1138
1139 crtl->has_nonlocal_goto = 1;
1140
1141 #ifdef HAVE_nonlocal_goto
1142 /* ??? We no longer need to pass the static chain value, afaik. */
1143 if (HAVE_nonlocal_goto)
1144 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1145 else
1146 #endif
1147 {
1148 r_label = copy_to_reg (r_label);
1149
1150 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1151 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1152
1153 /* Restore frame pointer for containing function. */
1154 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1155 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1156
1157 /* USE of hard_frame_pointer_rtx added for consistency;
1158 not clear if really needed. */
1159 emit_use (hard_frame_pointer_rtx);
1160 emit_use (stack_pointer_rtx);
1161
1162 /* If the architecture is using a GP register, we must
1163 conservatively assume that the target function makes use of it.
1164 The prologue of functions with nonlocal gotos must therefore
1165 initialize the GP register to the appropriate value, and we
1166 must then make sure that this value is live at the point
1167 of the jump. (Note that this doesn't necessarily apply
1168 to targets with a nonlocal_goto pattern; they are free
1169 to implement it in their own way. Note also that this is
1170 a no-op if the GP register is a global invariant.) */
1171 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
1172 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
1173 emit_use (pic_offset_table_rtx);
1174
1175 emit_indirect_jump (r_label);
1176 }
1177
1178 /* Search backwards to the jump insn and mark it as a
1179 non-local goto. */
1180 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1181 {
1182 if (JUMP_P (insn))
1183 {
1184 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1185 break;
1186 }
1187 else if (CALL_P (insn))
1188 break;
1189 }
1190
1191 return const0_rtx;
1192 }
1193
1194 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1195 (not all will be used on all machines) that was passed to __builtin_setjmp.
1196 It updates the stack pointer in that block to correspond to the current
1197 stack pointer. */
1198
1199 static void
1200 expand_builtin_update_setjmp_buf (rtx buf_addr)
1201 {
1202 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1203 rtx stack_save
1204 = gen_rtx_MEM (sa_mode,
1205 memory_address
1206 (sa_mode,
1207 plus_constant (Pmode, buf_addr,
1208 2 * GET_MODE_SIZE (Pmode))));
1209
1210 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1211 }
1212
1213 /* Expand a call to __builtin_prefetch. For a target that does not support
1214 data prefetch, evaluate the memory address argument in case it has side
1215 effects. */
1216
1217 static void
1218 expand_builtin_prefetch (tree exp)
1219 {
1220 tree arg0, arg1, arg2;
1221 int nargs;
1222 rtx op0, op1, op2;
1223
1224 if (!validate_arglist (exp, POINTER_TYPE, 0))
1225 return;
1226
1227 arg0 = CALL_EXPR_ARG (exp, 0);
1228
1229 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1230 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1231 locality). */
1232 nargs = call_expr_nargs (exp);
1233 if (nargs > 1)
1234 arg1 = CALL_EXPR_ARG (exp, 1);
1235 else
1236 arg1 = integer_zero_node;
1237 if (nargs > 2)
1238 arg2 = CALL_EXPR_ARG (exp, 2);
1239 else
1240 arg2 = integer_three_node;
1241
1242 /* Argument 0 is an address. */
1243 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1244
1245 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1246 if (TREE_CODE (arg1) != INTEGER_CST)
1247 {
1248 error ("second argument to %<__builtin_prefetch%> must be a constant");
1249 arg1 = integer_zero_node;
1250 }
1251 op1 = expand_normal (arg1);
1252 /* Argument 1 must be either zero or one. */
1253 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1254 {
1255 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1256 " using zero");
1257 op1 = const0_rtx;
1258 }
1259
1260 /* Argument 2 (locality) must be a compile-time constant int. */
1261 if (TREE_CODE (arg2) != INTEGER_CST)
1262 {
1263 error ("third argument to %<__builtin_prefetch%> must be a constant");
1264 arg2 = integer_zero_node;
1265 }
1266 op2 = expand_normal (arg2);
1267 /* Argument 2 must be 0, 1, 2, or 3. */
1268 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1269 {
1270 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1271 op2 = const0_rtx;
1272 }
1273
1274 #ifdef HAVE_prefetch
1275 if (HAVE_prefetch)
1276 {
1277 struct expand_operand ops[3];
1278
1279 create_address_operand (&ops[0], op0);
1280 create_integer_operand (&ops[1], INTVAL (op1));
1281 create_integer_operand (&ops[2], INTVAL (op2));
1282 if (maybe_expand_insn (CODE_FOR_prefetch, 3, ops))
1283 return;
1284 }
1285 #endif
1286
1287 /* Don't do anything with direct references to volatile memory, but
1288 generate code to handle other side effects. */
1289 if (!MEM_P (op0) && side_effects_p (op0))
1290 emit_insn (op0);
1291 }
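/* User-level view (illustrative): the validation above corresponds to
   the documented builtin

     __builtin_prefetch (p);          (read, locality 3: the defaults)
     __builtin_prefetch (p, 1, 0);    (write, no temporal locality)

   The second and third arguments must be compile-time integer
   constants, which is why non-constant values are diagnosed here
   rather than expanded. */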
1292
1293 /* Get a MEM rtx for expression EXP which is the address of an operand
1294 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1295 the maximum length of the block of memory that might be accessed or
1296 NULL if unknown. */
1297
1298 static rtx
1299 get_memory_rtx (tree exp, tree len)
1300 {
1301 tree orig_exp = exp;
1302 rtx addr, mem;
1303
1304 /* When EXP is an unresolved SAVE_EXPR, MEM_ATTRS can still be derived
1305 from its expression; for expr->a.b only <variable>.a.b is recorded. */
1306 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1307 exp = TREE_OPERAND (exp, 0);
1308
1309 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1310 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1311
1312 /* Get an expression we can use to find the attributes to assign to MEM.
1313 First remove any nops. */
1314 while (CONVERT_EXPR_P (exp)
1315 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1316 exp = TREE_OPERAND (exp, 0);
1317
1318 /* Build a MEM_REF representing the whole accessed area as a byte blob,
1319 (as builtin stringops may alias with anything). */
1320 exp = fold_build2 (MEM_REF,
1321 build_array_type (char_type_node,
1322 build_range_type (sizetype,
1323 size_one_node, len)),
1324 exp, build_int_cst (ptr_type_node, 0));
1325
1326 /* If the MEM_REF has no acceptable address, try to get the base object
1327 from the original address we got, and build an all-aliasing
1328 unknown-sized access to that one. */
1329 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1330 set_mem_attributes (mem, exp, 0);
1331 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1332 && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1333 0))))
1334 {
1335 exp = build_fold_addr_expr (exp);
1336 exp = fold_build2 (MEM_REF,
1337 build_array_type (char_type_node,
1338 build_range_type (sizetype,
1339 size_zero_node,
1340 NULL)),
1341 exp, build_int_cst (ptr_type_node, 0));
1342 set_mem_attributes (mem, exp, 0);
1343 }
1344 set_mem_alias_set (mem, 0);
1345 return mem;
1346 }
1347 \f
1348 /* Built-in functions to perform an untyped call and return. */
1349
1350 #define apply_args_mode \
1351 (this_target_builtins->x_apply_args_mode)
1352 #define apply_result_mode \
1353 (this_target_builtins->x_apply_result_mode)
1354
1355 /* Return the size required for the block returned by __builtin_apply_args,
1356 and initialize apply_args_mode. */
1357
1358 static int
1359 apply_args_size (void)
1360 {
1361 static int size = -1;
1362 int align;
1363 unsigned int regno;
1364 enum machine_mode mode;
1365
1366 /* The values computed by this function never change. */
1367 if (size < 0)
1368 {
1369 /* The first value is the incoming arg-pointer. */
1370 size = GET_MODE_SIZE (Pmode);
1371
1372 /* The second value is the structure value address unless this is
1373 passed as an "invisible" first argument. */
1374 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1375 size += GET_MODE_SIZE (Pmode);
1376
1377 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1378 if (FUNCTION_ARG_REGNO_P (regno))
1379 {
1380 mode = targetm.calls.get_raw_arg_mode (regno);
1381
1382 gcc_assert (mode != VOIDmode);
1383
1384 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1385 if (size % align != 0)
1386 size = CEIL (size, align) * align;
1387 size += GET_MODE_SIZE (mode);
1388 apply_args_mode[regno] = mode;
1389 }
1390 else
1391 {
1392 apply_args_mode[regno] = VOIDmode;
1393 }
1394 }
1395 return size;
1396 }
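/* Worked example of the round-up above (illustrative): with
   size == 6 and an 8-byte mode alignment,

     size % 8 != 0, so size = CEIL (6, 8) * 8 == 8

   i.e. each saved register is placed at the next multiple of its
   mode's alignment within the block, and the mode is recorded in
   apply_args_mode for the restore code to use. */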
1397
1398 /* Return the size required for the block returned by __builtin_apply,
1399 and initialize apply_result_mode. */
1400
1401 static int
1402 apply_result_size (void)
1403 {
1404 static int size = -1;
1405 int align, regno;
1406 enum machine_mode mode;
1407
1408 /* The values computed by this function never change. */
1409 if (size < 0)
1410 {
1411 size = 0;
1412
1413 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1414 if (targetm.calls.function_value_regno_p (regno))
1415 {
1416 mode = targetm.calls.get_raw_result_mode (regno);
1417
1418 gcc_assert (mode != VOIDmode);
1419
1420 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1421 if (size % align != 0)
1422 size = CEIL (size, align) * align;
1423 size += GET_MODE_SIZE (mode);
1424 apply_result_mode[regno] = mode;
1425 }
1426 else
1427 apply_result_mode[regno] = VOIDmode;
1428
1429 /* Allow targets that use untyped_call and untyped_return to override
1430 the size so that machine-specific information can be stored here. */
1431 #ifdef APPLY_RESULT_SIZE
1432 size = APPLY_RESULT_SIZE;
1433 #endif
1434 }
1435 return size;
1436 }
1437
1438 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1439 /* Create a vector describing the result block RESULT. If SAVEP is true,
1440 the result block is used to save the values; otherwise it is used to
1441 restore the values. */
1442
1443 static rtx
1444 result_vector (int savep, rtx result)
1445 {
1446 int regno, size, align, nelts;
1447 enum machine_mode mode;
1448 rtx reg, mem;
1449 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1450
1451 size = nelts = 0;
1452 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1453 if ((mode = apply_result_mode[regno]) != VOIDmode)
1454 {
1455 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1456 if (size % align != 0)
1457 size = CEIL (size, align) * align;
1458 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1459 mem = adjust_address (result, mode, size);
1460 savevec[nelts++] = (savep
1461 ? gen_rtx_SET (VOIDmode, mem, reg)
1462 : gen_rtx_SET (VOIDmode, reg, mem));
1463 size += GET_MODE_SIZE (mode);
1464 }
1465 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1466 }
1467 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1468
1469 /* Save the state required to perform an untyped call with the same
1470 arguments as were passed to the current function. */
1471
1472 static rtx
1473 expand_builtin_apply_args_1 (void)
1474 {
1475 rtx registers, tem;
1476 int size, align, regno;
1477 enum machine_mode mode;
1478 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1479
1480 /* Create a block where the arg-pointer, structure value address,
1481 and argument registers can be saved. */
1482 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1483
1484 /* Walk past the arg-pointer and structure value address. */
1485 size = GET_MODE_SIZE (Pmode);
1486 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1487 size += GET_MODE_SIZE (Pmode);
1488
1489 /* Save each register used in calling a function to the block. */
1490 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1491 if ((mode = apply_args_mode[regno]) != VOIDmode)
1492 {
1493 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1494 if (size % align != 0)
1495 size = CEIL (size, align) * align;
1496
1497 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1498
1499 emit_move_insn (adjust_address (registers, mode, size), tem);
1500 size += GET_MODE_SIZE (mode);
1501 }
1502
1503 /* Save the arg pointer to the block. */
1504 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1505 #ifdef STACK_GROWS_DOWNWARD
1506 /* We need the arg pointer as the caller actually passed the arguments to
1507 us, not as we might have pretended they were passed. Make sure it's a valid
1508 operand, as emit_move_insn isn't expected to handle a PLUS. */
1509 tem
1510 = force_operand (plus_constant (Pmode, tem, crtl->args.pretend_args_size),
1511 NULL_RTX);
1512 #endif
1513 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1514
1515 size = GET_MODE_SIZE (Pmode);
1516
1517 /* Save the structure value address unless this is passed as an
1518 "invisible" first argument. */
1519 if (struct_incoming_value)
1520 {
1521 emit_move_insn (adjust_address (registers, Pmode, size),
1522 copy_to_reg (struct_incoming_value));
1523 size += GET_MODE_SIZE (Pmode);
1524 }
1525
1526 /* Return the address of the block. */
1527 return copy_addr_to_reg (XEXP (registers, 0));
1528 }
1529
1530 /* __builtin_apply_args returns block of memory allocated on
1531 the stack into which is stored the arg pointer, structure
1532 value address, static chain, and all the registers that might
1533 possibly be used in performing a function call. The code is
1534 moved to the start of the function so the incoming values are
1535 saved. */
1536
1537 static rtx
1538 expand_builtin_apply_args (void)
1539 {
1540 /* Don't do __builtin_apply_args more than once in a function.
1541 Save the result of the first call and reuse it. */
1542 if (apply_args_value != 0)
1543 return apply_args_value;
1544 {
1545 /* When this function is called, it means that registers must be
1546 saved on entry to this function. So we migrate the
1547 call to the first insn of this function. */
1548 rtx temp;
1549 rtx seq;
1550
1551 start_sequence ();
1552 temp = expand_builtin_apply_args_1 ();
1553 seq = get_insns ();
1554 end_sequence ();
1555
1556 apply_args_value = temp;
1557
1558 /* Put the insns after the NOTE that starts the function.
1559 If this is inside a start_sequence, make the outer-level insn
1560 chain current, so the code is placed at the start of the
1561 function. If internal_arg_pointer is a non-virtual pseudo,
1562 it needs to be placed after the function that initializes
1563 that pseudo. */
1564 push_topmost_sequence ();
1565 if (REG_P (crtl->args.internal_arg_pointer)
1566 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1567 emit_insn_before (seq, parm_birth_insn);
1568 else
1569 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1570 pop_topmost_sequence ();
1571 return temp;
1572 }
1573 }
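/* Usage sketch (illustrative): together with __builtin_apply and
   __builtin_return this supports untyped call forwarding:

     void *args = __builtin_apply_args ();
     void *res = __builtin_apply ((void (*) ()) target_fn, args, 64);
     __builtin_return (res);

   target_fn and the 64-byte argument-size bound are hypothetical
   placeholders supplied by the caller. */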
1574
1575 /* Perform an untyped call and save the state required to perform an
1576 untyped return of whatever value was returned by the given function. */
1577
1578 static rtx
1579 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1580 {
1581 int size, align, regno;
1582 enum machine_mode mode;
1583 rtx incoming_args, result, reg, dest, src, call_insn;
1584 rtx old_stack_level = 0;
1585 rtx call_fusage = 0;
1586 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1587
1588 arguments = convert_memory_address (Pmode, arguments);
1589
1590 /* Create a block where the return registers can be saved. */
1591 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1592
1593 /* Fetch the arg pointer from the ARGUMENTS block. */
1594 incoming_args = gen_reg_rtx (Pmode);
1595 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1596 #ifndef STACK_GROWS_DOWNWARD
1597 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1598 incoming_args, 0, OPTAB_LIB_WIDEN);
1599 #endif
1600
1601 /* Push a new argument block and copy the arguments. Do not allow
1602 the (potential) memcpy call below to interfere with our stack
1603 manipulations. */
1604 do_pending_stack_adjust ();
1605 NO_DEFER_POP;
1606
1607 /* Save the stack pointer with the nonlocal variant if available.  */
1608 #ifdef HAVE_save_stack_nonlocal
1609 if (HAVE_save_stack_nonlocal)
1610 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1611 else
1612 #endif
1613 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1614
1615 /* Allocate a block of memory on the stack and copy the memory
1616 arguments to the outgoing arguments address. We can pass TRUE
1617 as the 4th argument because we just saved the stack pointer
1618 and will restore it right after the call. */
1619 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1620
1621 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1622 may have already set current_function_calls_alloca to true.
1623 current_function_calls_alloca won't be set if argsize is zero,
1624 so we have to guarantee need_drap is true here. */
1625 if (SUPPORTS_STACK_ALIGNMENT)
1626 crtl->need_drap = true;
1627
1628 dest = virtual_outgoing_args_rtx;
1629 #ifndef STACK_GROWS_DOWNWARD
1630 if (CONST_INT_P (argsize))
1631 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1632 else
1633 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1634 #endif
1635 dest = gen_rtx_MEM (BLKmode, dest);
1636 set_mem_align (dest, PARM_BOUNDARY);
1637 src = gen_rtx_MEM (BLKmode, incoming_args);
1638 set_mem_align (src, PARM_BOUNDARY);
1639 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1640
1641 /* Refer to the argument block. */
1642 apply_args_size ();
1643 arguments = gen_rtx_MEM (BLKmode, arguments);
1644 set_mem_align (arguments, PARM_BOUNDARY);
1645
1646 /* Walk past the arg-pointer and structure value address. */
1647 size = GET_MODE_SIZE (Pmode);
1648 if (struct_value)
1649 size += GET_MODE_SIZE (Pmode);
1650
1651 /* Restore each of the registers previously saved. Make USE insns
1652 for each of these registers for use in making the call. */
1653 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1654 if ((mode = apply_args_mode[regno]) != VOIDmode)
1655 {
1656 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1657 if (size % align != 0)
1658 size = CEIL (size, align) * align;
1659 reg = gen_rtx_REG (mode, regno);
1660 emit_move_insn (reg, adjust_address (arguments, mode, size));
1661 use_reg (&call_fusage, reg);
1662 size += GET_MODE_SIZE (mode);
1663 }
1664
1665 /* Restore the structure value address unless this is passed as an
1666 "invisible" first argument. */
1667 size = GET_MODE_SIZE (Pmode);
1668 if (struct_value)
1669 {
1670 rtx value = gen_reg_rtx (Pmode);
1671 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1672 emit_move_insn (struct_value, value);
1673 if (REG_P (struct_value))
1674 use_reg (&call_fusage, struct_value);
1675 size += GET_MODE_SIZE (Pmode);
1676 }
1677
1678 /* All arguments and registers used for the call are set up by now! */
1679 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1680
1681 /* Ensure the address is valid.  A SYMBOL_REF needs no fixup, and we
1682 don't want to load it into a register as an optimization, because
1683 prepare_call_address already did that if it should be done.  */
1684 if (GET_CODE (function) != SYMBOL_REF)
1685 function = memory_address (FUNCTION_MODE, function);
1686
1687 /* Generate the actual call instruction and save the return value. */
1688 #ifdef HAVE_untyped_call
1689 if (HAVE_untyped_call)
1690 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1691 result, result_vector (1, result)));
1692 else
1693 #endif
1694 #ifdef HAVE_call_value
1695 if (HAVE_call_value)
1696 {
1697 rtx valreg = 0;
1698
1699 /* Locate the unique return register. It is not possible to
1700 express a call that sets more than one return register using
1701 call_value; use untyped_call for that. In fact, untyped_call
1702 only needs to save the return registers in the given block. */
1703 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1704 if ((mode = apply_result_mode[regno]) != VOIDmode)
1705 {
1706 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1707
1708 valreg = gen_rtx_REG (mode, regno);
1709 }
1710
1711 emit_call_insn (GEN_CALL_VALUE (valreg,
1712 gen_rtx_MEM (FUNCTION_MODE, function),
1713 const0_rtx, NULL_RTX, const0_rtx));
1714
1715 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1716 }
1717 else
1718 #endif
1719 gcc_unreachable ();
1720
1721 /* Find the CALL insn we just emitted, and attach the register usage
1722 information. */
1723 call_insn = last_call_insn ();
1724 add_function_usage_to (call_insn, call_fusage);
1725
1726 /* Restore the stack. */
1727 #ifdef HAVE_save_stack_nonlocal
1728 if (HAVE_save_stack_nonlocal)
1729 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1730 else
1731 #endif
1732 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1733 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1734
1735 OK_DEFER_POP;
1736
1737 /* Return the address of the result block. */
1738 result = copy_addr_to_reg (XEXP (result, 0));
1739 return convert_memory_address (ptr_mode, result);
1740 }
1741
1742 /* Perform an untyped return. */
1743
1744 static void
1745 expand_builtin_return (rtx result)
1746 {
1747 int size, align, regno;
1748 enum machine_mode mode;
1749 rtx reg;
1750 rtx call_fusage = 0;
1751
1752 result = convert_memory_address (Pmode, result);
1753
1754 apply_result_size ();
1755 result = gen_rtx_MEM (BLKmode, result);
1756
1757 #ifdef HAVE_untyped_return
1758 if (HAVE_untyped_return)
1759 {
1760 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1761 emit_barrier ();
1762 return;
1763 }
1764 #endif
1765
1766 /* Restore the return value and note that each value is used. */
1767 size = 0;
1768 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1769 if ((mode = apply_result_mode[regno]) != VOIDmode)
1770 {
1771 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1772 if (size % align != 0)
1773 size = CEIL (size, align) * align;
1774 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1775 emit_move_insn (reg, adjust_address (result, mode, size));
1776
1777 push_to_sequence (call_fusage);
1778 emit_use (reg);
1779 call_fusage = get_insns ();
1780 end_sequence ();
1781 size += GET_MODE_SIZE (mode);
1782 }
1783
1784 /* Put the USE insns before the return. */
1785 emit_insn (call_fusage);
1786
1787 /* Return whatever value was restored by jumping directly to the end
1788 of the function. */
1789 expand_naked_return ();
1790 }
1791
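/* Taken together, the three expanders above implement untyped call
   forwarding.  A hedged user-level sketch (TARGET_FN and FORWARD are
   hypothetical names, and the 64-byte argument size is only an
   assumption for illustration):

     double target_fn (double x) { return x + 1.0; }

     double forward (double x)
     {
       void *args = __builtin_apply_args ();
       __builtin_return (__builtin_apply ((void (*) ()) target_fn,
                                          args, 64));
     }

   __builtin_apply copies ARGS into a fresh argument block and makes
   the call; __builtin_return then propagates the saved return
   registers as FORWARD's own return value.  */
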
1792 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1793
1794 static enum type_class
1795 type_to_class (tree type)
1796 {
1797 switch (TREE_CODE (type))
1798 {
1799 case VOID_TYPE: return void_type_class;
1800 case INTEGER_TYPE: return integer_type_class;
1801 case ENUMERAL_TYPE: return enumeral_type_class;
1802 case BOOLEAN_TYPE: return boolean_type_class;
1803 case POINTER_TYPE: return pointer_type_class;
1804 case REFERENCE_TYPE: return reference_type_class;
1805 case OFFSET_TYPE: return offset_type_class;
1806 case REAL_TYPE: return real_type_class;
1807 case COMPLEX_TYPE: return complex_type_class;
1808 case FUNCTION_TYPE: return function_type_class;
1809 case METHOD_TYPE: return method_type_class;
1810 case RECORD_TYPE: return record_type_class;
1811 case UNION_TYPE:
1812 case QUAL_UNION_TYPE: return union_type_class;
1813 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1814 ? string_type_class : array_type_class);
1815 case LANG_TYPE: return lang_type_class;
1816 default: return no_type_class;
1817 }
1818 }
1819
1820 /* Expand a call EXP to __builtin_classify_type. */
1821
1822 static rtx
1823 expand_builtin_classify_type (tree exp)
1824 {
1825 if (call_expr_nargs (exp))
1826 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1827 return GEN_INT (no_type_class);
1828 }
1829
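/* For example (illustrative only), in user code

     __builtin_classify_type (42)    evaluates to integer_type_class,
     __builtin_classify_type (3.14)  evaluates to real_type_class,

   resolved to a constant whenever the argument's type is known.  */
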
1830 /* This helper macro, meant to be used in mathfn_built_in_1 below,
1831 determines which among a set of three builtin math functions is
1832 appropriate for a given type mode. The `F' and `L' cases are
1833 automatically generated from the `double' case. */
1834 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1835 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1836 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1837 fcodel = BUILT_IN_MATHFN##L ; break;
1838 /* Similar to above, but appends _R after any F/L suffix. */
1839 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1840 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1841 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1842 fcodel = BUILT_IN_MATHFN##L_R ; break;
1843
1844 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
1845 if available. If IMPLICIT is true use the implicit builtin declaration,
1846 otherwise use the explicit declaration. If we can't do the conversion,
1847 return zero. */
1848
1849 static tree
1850 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
1851 {
1852 enum built_in_function fcode, fcodef, fcodel, fcode2;
1853
1854 switch (fn)
1855 {
1856 CASE_MATHFN (BUILT_IN_ACOS)
1857 CASE_MATHFN (BUILT_IN_ACOSH)
1858 CASE_MATHFN (BUILT_IN_ASIN)
1859 CASE_MATHFN (BUILT_IN_ASINH)
1860 CASE_MATHFN (BUILT_IN_ATAN)
1861 CASE_MATHFN (BUILT_IN_ATAN2)
1862 CASE_MATHFN (BUILT_IN_ATANH)
1863 CASE_MATHFN (BUILT_IN_CBRT)
1864 CASE_MATHFN (BUILT_IN_CEIL)
1865 CASE_MATHFN (BUILT_IN_CEXPI)
1866 CASE_MATHFN (BUILT_IN_COPYSIGN)
1867 CASE_MATHFN (BUILT_IN_COS)
1868 CASE_MATHFN (BUILT_IN_COSH)
1869 CASE_MATHFN (BUILT_IN_DREM)
1870 CASE_MATHFN (BUILT_IN_ERF)
1871 CASE_MATHFN (BUILT_IN_ERFC)
1872 CASE_MATHFN (BUILT_IN_EXP)
1873 CASE_MATHFN (BUILT_IN_EXP10)
1874 CASE_MATHFN (BUILT_IN_EXP2)
1875 CASE_MATHFN (BUILT_IN_EXPM1)
1876 CASE_MATHFN (BUILT_IN_FABS)
1877 CASE_MATHFN (BUILT_IN_FDIM)
1878 CASE_MATHFN (BUILT_IN_FLOOR)
1879 CASE_MATHFN (BUILT_IN_FMA)
1880 CASE_MATHFN (BUILT_IN_FMAX)
1881 CASE_MATHFN (BUILT_IN_FMIN)
1882 CASE_MATHFN (BUILT_IN_FMOD)
1883 CASE_MATHFN (BUILT_IN_FREXP)
1884 CASE_MATHFN (BUILT_IN_GAMMA)
1885 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1886 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1887 CASE_MATHFN (BUILT_IN_HYPOT)
1888 CASE_MATHFN (BUILT_IN_ILOGB)
1889 CASE_MATHFN (BUILT_IN_ICEIL)
1890 CASE_MATHFN (BUILT_IN_IFLOOR)
1891 CASE_MATHFN (BUILT_IN_INF)
1892 CASE_MATHFN (BUILT_IN_IRINT)
1893 CASE_MATHFN (BUILT_IN_IROUND)
1894 CASE_MATHFN (BUILT_IN_ISINF)
1895 CASE_MATHFN (BUILT_IN_J0)
1896 CASE_MATHFN (BUILT_IN_J1)
1897 CASE_MATHFN (BUILT_IN_JN)
1898 CASE_MATHFN (BUILT_IN_LCEIL)
1899 CASE_MATHFN (BUILT_IN_LDEXP)
1900 CASE_MATHFN (BUILT_IN_LFLOOR)
1901 CASE_MATHFN (BUILT_IN_LGAMMA)
1902 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1903 CASE_MATHFN (BUILT_IN_LLCEIL)
1904 CASE_MATHFN (BUILT_IN_LLFLOOR)
1905 CASE_MATHFN (BUILT_IN_LLRINT)
1906 CASE_MATHFN (BUILT_IN_LLROUND)
1907 CASE_MATHFN (BUILT_IN_LOG)
1908 CASE_MATHFN (BUILT_IN_LOG10)
1909 CASE_MATHFN (BUILT_IN_LOG1P)
1910 CASE_MATHFN (BUILT_IN_LOG2)
1911 CASE_MATHFN (BUILT_IN_LOGB)
1912 CASE_MATHFN (BUILT_IN_LRINT)
1913 CASE_MATHFN (BUILT_IN_LROUND)
1914 CASE_MATHFN (BUILT_IN_MODF)
1915 CASE_MATHFN (BUILT_IN_NAN)
1916 CASE_MATHFN (BUILT_IN_NANS)
1917 CASE_MATHFN (BUILT_IN_NEARBYINT)
1918 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1919 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1920 CASE_MATHFN (BUILT_IN_POW)
1921 CASE_MATHFN (BUILT_IN_POWI)
1922 CASE_MATHFN (BUILT_IN_POW10)
1923 CASE_MATHFN (BUILT_IN_REMAINDER)
1924 CASE_MATHFN (BUILT_IN_REMQUO)
1925 CASE_MATHFN (BUILT_IN_RINT)
1926 CASE_MATHFN (BUILT_IN_ROUND)
1927 CASE_MATHFN (BUILT_IN_SCALB)
1928 CASE_MATHFN (BUILT_IN_SCALBLN)
1929 CASE_MATHFN (BUILT_IN_SCALBN)
1930 CASE_MATHFN (BUILT_IN_SIGNBIT)
1931 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1932 CASE_MATHFN (BUILT_IN_SIN)
1933 CASE_MATHFN (BUILT_IN_SINCOS)
1934 CASE_MATHFN (BUILT_IN_SINH)
1935 CASE_MATHFN (BUILT_IN_SQRT)
1936 CASE_MATHFN (BUILT_IN_TAN)
1937 CASE_MATHFN (BUILT_IN_TANH)
1938 CASE_MATHFN (BUILT_IN_TGAMMA)
1939 CASE_MATHFN (BUILT_IN_TRUNC)
1940 CASE_MATHFN (BUILT_IN_Y0)
1941 CASE_MATHFN (BUILT_IN_Y1)
1942 CASE_MATHFN (BUILT_IN_YN)
1943
1944 default:
1945 return NULL_TREE;
1946 }
1947
1948 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1949 fcode2 = fcode;
1950 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1951 fcode2 = fcodef;
1952 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1953 fcode2 = fcodel;
1954 else
1955 return NULL_TREE;
1956
1957 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1958 return NULL_TREE;
1959
1960 return builtin_decl_explicit (fcode2);
1961 }
1962
1963 /* Like mathfn_built_in_1(), but always use the implicit builtin declarations.  */
1964
1965 tree
1966 mathfn_built_in (tree type, enum built_in_function fn)
1967 {
1968 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1969 }
1970
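/* For example, mathfn_built_in (float_type_node, BUILT_IN_SIN)
   yields the builtin decl for sinf, long_double_type_node yields
   sinl, and a type with no matching variant yields NULL_TREE.  */
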
1971 /* If errno must be maintained, expand the RTL to check if the result,
1972 TARGET, of a built-in function call, EXP, is NaN, and if so set
1973 errno to EDOM. */
1974
1975 static void
1976 expand_errno_check (tree exp, rtx target)
1977 {
1978 rtx lab = gen_label_rtx ();
1979
1980 /* Test the result; if it is NaN, set errno=EDOM because
1981 the argument was not in the domain. */
1982 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1983 NULL_RTX, NULL_RTX, lab,
1984 /* The jump is very likely. */
1985 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
1986
1987 #ifdef TARGET_EDOM
1988 /* If this built-in doesn't throw an exception, set errno directly. */
1989 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1990 {
1991 #ifdef GEN_ERRNO_RTX
1992 rtx errno_rtx = GEN_ERRNO_RTX;
1993 #else
1994 rtx errno_rtx
1995 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1996 #endif
1997 emit_move_insn (errno_rtx,
1998 gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
1999 emit_label (lab);
2000 return;
2001 }
2002 #endif
2003
2004 /* Make sure the library call isn't expanded as a tail call. */
2005 CALL_EXPR_TAILCALL (exp) = 0;
2006
2007 /* We can't set errno=EDOM directly; let the library call do it.
2008 Pop the arguments right away in case the call gets deleted. */
2009 NO_DEFER_POP;
2010 expand_call (exp, target, 0);
2011 OK_DEFER_POP;
2012 emit_label (lab);
2013 }
2014
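/* The user-visible semantics preserved above, as a sketch (assuming
   -fmath-errno, the default in most hosted environments):

     errno = 0;
     double r = sqrt (-1.0);

   R is NaN, so it compares unequal to itself, and the code emitted
   above sets errno to EDOM, either directly via TARGET_EDOM or by
   re-issuing the library call.  */
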
2015 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
2016 Return NULL_RTX if a normal call should be emitted rather than expanding
2017 the function in-line. EXP is the expression that is a call to the builtin
2018 function; if convenient, the result should be placed in TARGET.
2019 SUBTARGET may be used as the target for computing one of EXP's operands. */
2020
2021 static rtx
2022 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
2023 {
2024 optab builtin_optab;
2025 rtx op0, insns;
2026 tree fndecl = get_callee_fndecl (exp);
2027 enum machine_mode mode;
2028 bool errno_set = false;
2029 bool try_widening = false;
2030 tree arg;
2031
2032 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2033 return NULL_RTX;
2034
2035 arg = CALL_EXPR_ARG (exp, 0);
2036
2037 switch (DECL_FUNCTION_CODE (fndecl))
2038 {
2039 CASE_FLT_FN (BUILT_IN_SQRT):
2040 errno_set = ! tree_expr_nonnegative_p (arg);
2041 try_widening = true;
2042 builtin_optab = sqrt_optab;
2043 break;
2044 CASE_FLT_FN (BUILT_IN_EXP):
2045 errno_set = true; builtin_optab = exp_optab; break;
2046 CASE_FLT_FN (BUILT_IN_EXP10):
2047 CASE_FLT_FN (BUILT_IN_POW10):
2048 errno_set = true; builtin_optab = exp10_optab; break;
2049 CASE_FLT_FN (BUILT_IN_EXP2):
2050 errno_set = true; builtin_optab = exp2_optab; break;
2051 CASE_FLT_FN (BUILT_IN_EXPM1):
2052 errno_set = true; builtin_optab = expm1_optab; break;
2053 CASE_FLT_FN (BUILT_IN_LOGB):
2054 errno_set = true; builtin_optab = logb_optab; break;
2055 CASE_FLT_FN (BUILT_IN_LOG):
2056 errno_set = true; builtin_optab = log_optab; break;
2057 CASE_FLT_FN (BUILT_IN_LOG10):
2058 errno_set = true; builtin_optab = log10_optab; break;
2059 CASE_FLT_FN (BUILT_IN_LOG2):
2060 errno_set = true; builtin_optab = log2_optab; break;
2061 CASE_FLT_FN (BUILT_IN_LOG1P):
2062 errno_set = true; builtin_optab = log1p_optab; break;
2063 CASE_FLT_FN (BUILT_IN_ASIN):
2064 builtin_optab = asin_optab; break;
2065 CASE_FLT_FN (BUILT_IN_ACOS):
2066 builtin_optab = acos_optab; break;
2067 CASE_FLT_FN (BUILT_IN_TAN):
2068 builtin_optab = tan_optab; break;
2069 CASE_FLT_FN (BUILT_IN_ATAN):
2070 builtin_optab = atan_optab; break;
2071 CASE_FLT_FN (BUILT_IN_FLOOR):
2072 builtin_optab = floor_optab; break;
2073 CASE_FLT_FN (BUILT_IN_CEIL):
2074 builtin_optab = ceil_optab; break;
2075 CASE_FLT_FN (BUILT_IN_TRUNC):
2076 builtin_optab = btrunc_optab; break;
2077 CASE_FLT_FN (BUILT_IN_ROUND):
2078 builtin_optab = round_optab; break;
2079 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2080 builtin_optab = nearbyint_optab;
2081 if (flag_trapping_math)
2082 break;
2083 /* Else fall through and expand as rint.  */
2084 CASE_FLT_FN (BUILT_IN_RINT):
2085 builtin_optab = rint_optab; break;
2086 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2087 builtin_optab = significand_optab; break;
2088 default:
2089 gcc_unreachable ();
2090 }
2091
2092 /* Make a suitable register to place result in. */
2093 mode = TYPE_MODE (TREE_TYPE (exp));
2094
2095 if (! flag_errno_math || ! HONOR_NANS (mode))
2096 errno_set = false;
2097
2098 /* Before working hard, check whether the instruction is available, but try
2099 to widen the mode for specific operations. */
2100 if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2101 || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
2102 && (!errno_set || !optimize_insn_for_size_p ()))
2103 {
2104 rtx result = gen_reg_rtx (mode);
2105
2106 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2107 need to expand the argument again. This way, we will not perform
2108 side-effects more than once.  */
2109 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2110
2111 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2112
2113 start_sequence ();
2114
2115 /* Compute into RESULT.
2116 Set RESULT to wherever the result comes back. */
2117 result = expand_unop (mode, builtin_optab, op0, result, 0);
2118
2119 if (result != 0)
2120 {
2121 if (errno_set)
2122 expand_errno_check (exp, result);
2123
2124 /* Output the entire sequence. */
2125 insns = get_insns ();
2126 end_sequence ();
2127 emit_insn (insns);
2128 return result;
2129 }
2130
2131 /* If we were unable to expand via the builtin, stop the sequence
2132 (without outputting the insns) and call the library function
2133 with the stabilized argument list. */
2134 end_sequence ();
2135 }
2136
2137 return expand_call (exp, target, target == const0_rtx);
2138 }
2139
2140 /* Expand a call to the builtin binary math functions (pow and atan2).
2141 Return NULL_RTX if a normal call should be emitted rather than expanding the
2142 function in-line. EXP is the expression that is a call to the builtin
2143 function; if convenient, the result should be placed in TARGET.
2144 SUBTARGET may be used as the target for computing one of EXP's
2145 operands. */
2146
2147 static rtx
2148 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2149 {
2150 optab builtin_optab;
2151 rtx op0, op1, insns, result;
2152 int op1_type = REAL_TYPE;
2153 tree fndecl = get_callee_fndecl (exp);
2154 tree arg0, arg1;
2155 enum machine_mode mode;
2156 bool errno_set = true;
2157
2158 switch (DECL_FUNCTION_CODE (fndecl))
2159 {
2160 CASE_FLT_FN (BUILT_IN_SCALBN):
2161 CASE_FLT_FN (BUILT_IN_SCALBLN):
2162 CASE_FLT_FN (BUILT_IN_LDEXP):
2163 op1_type = INTEGER_TYPE;  /* FALLTHRU */
2164 default:
2165 break;
2166 }
2167
2168 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2169 return NULL_RTX;
2170
2171 arg0 = CALL_EXPR_ARG (exp, 0);
2172 arg1 = CALL_EXPR_ARG (exp, 1);
2173
2174 switch (DECL_FUNCTION_CODE (fndecl))
2175 {
2176 CASE_FLT_FN (BUILT_IN_POW):
2177 builtin_optab = pow_optab; break;
2178 CASE_FLT_FN (BUILT_IN_ATAN2):
2179 builtin_optab = atan2_optab; break;
2180 CASE_FLT_FN (BUILT_IN_SCALB):
2181 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2182 return 0;
2183 builtin_optab = scalb_optab; break;
2184 CASE_FLT_FN (BUILT_IN_SCALBN):
2185 CASE_FLT_FN (BUILT_IN_SCALBLN):
2186 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2187 return 0;
2188 /* Fall through... */
2189 CASE_FLT_FN (BUILT_IN_LDEXP):
2190 builtin_optab = ldexp_optab; break;
2191 CASE_FLT_FN (BUILT_IN_FMOD):
2192 builtin_optab = fmod_optab; break;
2193 CASE_FLT_FN (BUILT_IN_REMAINDER):
2194 CASE_FLT_FN (BUILT_IN_DREM):
2195 builtin_optab = remainder_optab; break;
2196 default:
2197 gcc_unreachable ();
2198 }
2199
2200 /* Make a suitable register to place result in. */
2201 mode = TYPE_MODE (TREE_TYPE (exp));
2202
2203 /* Before working hard, check whether the instruction is available. */
2204 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2205 return NULL_RTX;
2206
2207 result = gen_reg_rtx (mode);
2208
2209 if (! flag_errno_math || ! HONOR_NANS (mode))
2210 errno_set = false;
2211
2212 if (errno_set && optimize_insn_for_size_p ())
2213 return 0;
2214
2215 /* Always stabilize the argument list. */
2216 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2217 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2218
2219 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2220 op1 = expand_normal (arg1);
2221
2222 start_sequence ();
2223
2224 /* Compute into RESULT.
2225 Set RESULT to wherever the result comes back. */
2226 result = expand_binop (mode, builtin_optab, op0, op1,
2227 result, 0, OPTAB_DIRECT);
2228
2229 /* If we were unable to expand via the builtin, stop the sequence
2230 (without outputting the insns) and call the library function
2231 with the stabilized argument list. */
2232 if (result == 0)
2233 {
2234 end_sequence ();
2235 return expand_call (exp, target, target == const0_rtx);
2236 }
2237
2238 if (errno_set)
2239 expand_errno_check (exp, result);
2240
2241 /* Output the entire sequence. */
2242 insns = get_insns ();
2243 end_sequence ();
2244 emit_insn (insns);
2245
2246 return result;
2247 }
2248
2249 /* Expand a call to the builtin ternary math functions (fma).
2250 Return NULL_RTX if a normal call should be emitted rather than expanding the
2251 function in-line. EXP is the expression that is a call to the builtin
2252 function; if convenient, the result should be placed in TARGET.
2253 SUBTARGET may be used as the target for computing one of EXP's
2254 operands. */
2255
2256 static rtx
2257 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2258 {
2259 optab builtin_optab;
2260 rtx op0, op1, op2, insns, result;
2261 tree fndecl = get_callee_fndecl (exp);
2262 tree arg0, arg1, arg2;
2263 enum machine_mode mode;
2264
2265 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2266 return NULL_RTX;
2267
2268 arg0 = CALL_EXPR_ARG (exp, 0);
2269 arg1 = CALL_EXPR_ARG (exp, 1);
2270 arg2 = CALL_EXPR_ARG (exp, 2);
2271
2272 switch (DECL_FUNCTION_CODE (fndecl))
2273 {
2274 CASE_FLT_FN (BUILT_IN_FMA):
2275 builtin_optab = fma_optab; break;
2276 default:
2277 gcc_unreachable ();
2278 }
2279
2280 /* Make a suitable register to place result in. */
2281 mode = TYPE_MODE (TREE_TYPE (exp));
2282
2283 /* Before working hard, check whether the instruction is available. */
2284 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2285 return NULL_RTX;
2286
2287 result = gen_reg_rtx (mode);
2288
2289 /* Always stabilize the argument list. */
2290 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2291 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2292 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2293
2294 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2295 op1 = expand_normal (arg1);
2296 op2 = expand_normal (arg2);
2297
2298 start_sequence ();
2299
2300 /* Compute into RESULT.
2301 Set RESULT to wherever the result comes back. */
2302 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2303 result, 0);
2304
2305 /* If we were unable to expand via the builtin, stop the sequence
2306 (without outputting the insns) and call the library function
2307 with the stabilized argument list. */
2308 if (result == 0)
2309 {
2310 end_sequence ();
2311 return expand_call (exp, target, target == const0_rtx);
2312 }
2313
2314 /* Output the entire sequence. */
2315 insns = get_insns ();
2316 end_sequence ();
2317 emit_insn (insns);
2318
2319 return result;
2320 }
2321
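/* For example (illustrative): the C99 call

     double r = fma (x, y, z);

   computes x*y + z with a single rounding; when the target provides
   an fma optab (typically a fused multiply-add instruction), the
   expansion above emits it directly instead of a libcall.  */
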
2322 /* Expand a call to the builtin sin and cos math functions.
2323 Return NULL_RTX if a normal call should be emitted rather than expanding the
2324 function in-line. EXP is the expression that is a call to the builtin
2325 function; if convenient, the result should be placed in TARGET.
2326 SUBTARGET may be used as the target for computing one of EXP's
2327 operands. */
2328
2329 static rtx
2330 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2331 {
2332 optab builtin_optab;
2333 rtx op0, insns;
2334 tree fndecl = get_callee_fndecl (exp);
2335 enum machine_mode mode;
2336 tree arg;
2337
2338 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2339 return NULL_RTX;
2340
2341 arg = CALL_EXPR_ARG (exp, 0);
2342
2343 switch (DECL_FUNCTION_CODE (fndecl))
2344 {
2345 CASE_FLT_FN (BUILT_IN_SIN):
2346 CASE_FLT_FN (BUILT_IN_COS):
2347 builtin_optab = sincos_optab; break;
2348 default:
2349 gcc_unreachable ();
2350 }
2351
2352 /* Make a suitable register to place result in. */
2353 mode = TYPE_MODE (TREE_TYPE (exp));
2354
2355 /* Check if the sincos insn is available; otherwise fall back
2356 to the sin or cos insn.  */
2357 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2358 switch (DECL_FUNCTION_CODE (fndecl))
2359 {
2360 CASE_FLT_FN (BUILT_IN_SIN):
2361 builtin_optab = sin_optab; break;
2362 CASE_FLT_FN (BUILT_IN_COS):
2363 builtin_optab = cos_optab; break;
2364 default:
2365 gcc_unreachable ();
2366 }
2367
2368 /* Before working hard, check whether the instruction is available. */
2369 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2370 {
2371 rtx result = gen_reg_rtx (mode);
2372
2373 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2374 need to expand the argument again. This way, we will not perform
2375 side-effects more than once.  */
2376 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2377
2378 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2379
2380 start_sequence ();
2381
2382 /* Compute into RESULT.
2383 Set RESULT to wherever the result comes back. */
2384 if (builtin_optab == sincos_optab)
2385 {
2386 int ok;
2387
2388 switch (DECL_FUNCTION_CODE (fndecl))
2389 {
2390 CASE_FLT_FN (BUILT_IN_SIN):
2391 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2392 break;
2393 CASE_FLT_FN (BUILT_IN_COS):
2394 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2395 break;
2396 default:
2397 gcc_unreachable ();
2398 }
2399 gcc_assert (ok);
2400 }
2401 else
2402 result = expand_unop (mode, builtin_optab, op0, result, 0);
2403
2404 if (result != 0)
2405 {
2406 /* Output the entire sequence. */
2407 insns = get_insns ();
2408 end_sequence ();
2409 emit_insn (insns);
2410 return result;
2411 }
2412
2413 /* If we were unable to expand via the builtin, stop the sequence
2414 (without outputting the insns) and call the library function
2415 with the stabilized argument list. */
2416 end_sequence ();
2417 }
2418
2419 return expand_call (exp, target, target == const0_rtx);
2420 }
2421
2422 /* Given an interclass math builtin decl FNDECL and its argument ARG
2423 return an RTL instruction code that implements the functionality.
2424 If that isn't possible or available return CODE_FOR_nothing. */
2425
2426 static enum insn_code
2427 interclass_mathfn_icode (tree arg, tree fndecl)
2428 {
2429 bool errno_set = false;
2430 optab builtin_optab = unknown_optab;
2431 enum machine_mode mode;
2432
2433 switch (DECL_FUNCTION_CODE (fndecl))
2434 {
2435 CASE_FLT_FN (BUILT_IN_ILOGB):
2436 errno_set = true; builtin_optab = ilogb_optab; break;
2437 CASE_FLT_FN (BUILT_IN_ISINF):
2438 builtin_optab = isinf_optab; break;
2439 case BUILT_IN_ISNORMAL:
2440 case BUILT_IN_ISFINITE:
2441 CASE_FLT_FN (BUILT_IN_FINITE):
2442 case BUILT_IN_FINITED32:
2443 case BUILT_IN_FINITED64:
2444 case BUILT_IN_FINITED128:
2445 case BUILT_IN_ISINFD32:
2446 case BUILT_IN_ISINFD64:
2447 case BUILT_IN_ISINFD128:
2448 /* These builtins have no optabs (yet). */
2449 break;
2450 default:
2451 gcc_unreachable ();
2452 }
2453
2454 /* There's no easy way to detect the case we need to set EDOM. */
2455 if (flag_errno_math && errno_set)
2456 return CODE_FOR_nothing;
2457
2458 /* Optab mode depends on the mode of the input argument. */
2459 mode = TYPE_MODE (TREE_TYPE (arg));
2460
2461 if (builtin_optab)
2462 return optab_handler (builtin_optab, mode);
2463 return CODE_FOR_nothing;
2464 }
2465
2466 /* Expand a call to one of the builtin math functions that operate on
2467 a floating point argument and output an integer result (ilogb, isinf,
2468 isnan, etc).
2469 Return 0 if a normal call should be emitted rather than expanding the
2470 function in-line. EXP is the expression that is a call to the builtin
2471 function; if convenient, the result should be placed in TARGET. */
2472
2473 static rtx
2474 expand_builtin_interclass_mathfn (tree exp, rtx target)
2475 {
2476 enum insn_code icode = CODE_FOR_nothing;
2477 rtx op0;
2478 tree fndecl = get_callee_fndecl (exp);
2479 enum machine_mode mode;
2480 tree arg;
2481
2482 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2483 return NULL_RTX;
2484
2485 arg = CALL_EXPR_ARG (exp, 0);
2486 icode = interclass_mathfn_icode (arg, fndecl);
2487 mode = TYPE_MODE (TREE_TYPE (arg));
2488
2489 if (icode != CODE_FOR_nothing)
2490 {
2491 struct expand_operand ops[1];
2492 rtx last = get_last_insn ();
2493 tree orig_arg = arg;
2494
2495 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2496 need to expand the argument again. This way, we will not perform
2497 side-effects more than once.  */
2498 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2499
2500 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2501
2502 if (mode != GET_MODE (op0))
2503 op0 = convert_to_mode (mode, op0, 0);
2504
2505 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2506 if (maybe_legitimize_operands (icode, 0, 1, ops)
2507 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2508 return ops[0].value;
2509
2510 delete_insns_since (last);
2511 CALL_EXPR_ARG (exp, 0) = orig_arg;
2512 }
2513
2514 return NULL_RTX;
2515 }
2516
2517 /* Expand a call to the builtin sincos math function.
2518 Return NULL_RTX if a normal call should be emitted rather than expanding the
2519 function in-line. EXP is the expression that is a call to the builtin
2520 function. */
2521
2522 static rtx
2523 expand_builtin_sincos (tree exp)
2524 {
2525 rtx op0, op1, op2, target1, target2;
2526 enum machine_mode mode;
2527 tree arg, sinp, cosp;
2528 int result;
2529 location_t loc = EXPR_LOCATION (exp);
2530 tree alias_type, alias_off;
2531
2532 if (!validate_arglist (exp, REAL_TYPE,
2533 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2534 return NULL_RTX;
2535
2536 arg = CALL_EXPR_ARG (exp, 0);
2537 sinp = CALL_EXPR_ARG (exp, 1);
2538 cosp = CALL_EXPR_ARG (exp, 2);
2539
2540 /* Make a suitable register to place result in. */
2541 mode = TYPE_MODE (TREE_TYPE (arg));
2542
2543 /* Check if sincos insn is available, otherwise emit the call. */
2544 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2545 return NULL_RTX;
2546
2547 target1 = gen_reg_rtx (mode);
2548 target2 = gen_reg_rtx (mode);
2549
2550 op0 = expand_normal (arg);
2551 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2552 alias_off = build_int_cst (alias_type, 0);
2553 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2554 sinp, alias_off));
2555 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2556 cosp, alias_off));
2557
2558 /* Compute into target1 and target2.
2559 Set TARGET to wherever the result comes back. */
2560 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2561 gcc_assert (result);
2562
2563 /* Move target1 and target2 to the memory locations indicated
2564 by op1 and op2. */
2565 emit_move_insn (op1, target1);
2566 emit_move_insn (op2, target2);
2567
2568 return const0_rtx;
2569 }
2570
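/* In user terms, a sketch of what is expanded above (sincos is a
   GNU extension):

     double s, c;
     sincos (x, &s, &c);

   which, when the target has a sincos insn, computes both values
   with a single instruction instead of two libcalls.  */
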
2571 /* Expand a call to the internal cexpi builtin to the sincos math function.
2572 EXP is the expression that is a call to the builtin function; if convenient,
2573 the result should be placed in TARGET. */
2574
2575 static rtx
2576 expand_builtin_cexpi (tree exp, rtx target)
2577 {
2578 tree fndecl = get_callee_fndecl (exp);
2579 tree arg, type;
2580 enum machine_mode mode;
2581 rtx op0, op1, op2;
2582 location_t loc = EXPR_LOCATION (exp);
2583
2584 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2585 return NULL_RTX;
2586
2587 arg = CALL_EXPR_ARG (exp, 0);
2588 type = TREE_TYPE (arg);
2589 mode = TYPE_MODE (TREE_TYPE (arg));
2590
2591 /* Try expanding via a sincos optab; fall back to emitting a libcall
2592 to sincos or cexp.  We are sure one of the two exists because cexpi
2593 is only generated by folding sincos or cexp, or when either is available.  */
2594 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2595 {
2596 op1 = gen_reg_rtx (mode);
2597 op2 = gen_reg_rtx (mode);
2598
2599 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2600
2601 /* Compute into op1 and op2. */
2602 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2603 }
2604 else if (targetm.libc_has_function (function_sincos))
2605 {
2606 tree call, fn = NULL_TREE;
2607 tree top1, top2;
2608 rtx op1a, op2a;
2609
2610 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2611 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2612 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2613 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2614 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2615 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2616 else
2617 gcc_unreachable ();
2618
2619 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2620 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2621 op1a = copy_addr_to_reg (XEXP (op1, 0));
2622 op2a = copy_addr_to_reg (XEXP (op2, 0));
2623 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2624 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2625
2626 /* Make sure not to fold the sincos call again. */
2627 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2628 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2629 call, 3, arg, top1, top2));
2630 }
2631 else
2632 {
2633 tree call, fn = NULL_TREE, narg;
2634 tree ctype = build_complex_type (type);
2635
2636 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2637 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2638 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2639 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2640 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2641 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2642 else
2643 gcc_unreachable ();
2644
2645 /* If we don't have a decl for cexp, create one.  This is the
2646 friendliest fallback if the user calls __builtin_cexpi
2647 on a target without full C99 function support.  */
2648 if (fn == NULL_TREE)
2649 {
2650 tree fntype;
2651 const char *name = NULL;
2652
2653 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2654 name = "cexpf";
2655 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2656 name = "cexp";
2657 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2658 name = "cexpl";
2659
2660 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2661 fn = build_fn_decl (name, fntype);
2662 }
2663
2664 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2665 build_real (type, dconst0), arg);
2666
2667 /* Make sure not to fold the cexp call again. */
2668 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2669 return expand_expr (build_call_nary (ctype, call, 1, narg),
2670 target, VOIDmode, EXPAND_NORMAL);
2671 }
2672
2673 /* Now build the proper return type. */
2674 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2675 make_tree (TREE_TYPE (arg), op2),
2676 make_tree (TREE_TYPE (arg), op1)),
2677 target, VOIDmode, EXPAND_NORMAL);
2678 }
2679
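/* The identity relied upon by the fallbacks above, for reference:
   __builtin_cexpi (x) == cexp (I*x) == cos (x) + I*sin (x), which
   is why either a sincos or a cexp implementation suffices.  */
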
2680 /* Conveniently construct a function call expression. FNDECL names the
2681 function to be called, N is the number of arguments, and the "..."
2682 parameters are the argument expressions.  Unlike build_call_expr
2683 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2684
2685 static tree
2686 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2687 {
2688 va_list ap;
2689 tree fntype = TREE_TYPE (fndecl);
2690 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2691
2692 va_start (ap, n);
2693 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2694 va_end (ap);
2695 SET_EXPR_LOCATION (fn, loc);
2696 return fn;
2697 }
2698
2699 /* Expand a call to one of the builtin rounding functions gcc defines
2700 as an extension (lfloor and lceil). As these are gcc extensions we
2701 do not need to worry about setting errno to EDOM.
2702 If expanding via the optab fails, lower the expression to (int)(floor(x)).
2703 EXP is the expression that is a call to the builtin function;
2704 if convenient, the result should be placed in TARGET. */
2705
2706 static rtx
2707 expand_builtin_int_roundingfn (tree exp, rtx target)
2708 {
2709 convert_optab builtin_optab;
2710 rtx op0, insns, tmp;
2711 tree fndecl = get_callee_fndecl (exp);
2712 enum built_in_function fallback_fn;
2713 tree fallback_fndecl;
2714 enum machine_mode mode;
2715 tree arg;
2716
2717 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2718 gcc_unreachable ();
2719
2720 arg = CALL_EXPR_ARG (exp, 0);
2721
2722 switch (DECL_FUNCTION_CODE (fndecl))
2723 {
2724 CASE_FLT_FN (BUILT_IN_ICEIL):
2725 CASE_FLT_FN (BUILT_IN_LCEIL):
2726 CASE_FLT_FN (BUILT_IN_LLCEIL):
2727 builtin_optab = lceil_optab;
2728 fallback_fn = BUILT_IN_CEIL;
2729 break;
2730
2731 CASE_FLT_FN (BUILT_IN_IFLOOR):
2732 CASE_FLT_FN (BUILT_IN_LFLOOR):
2733 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2734 builtin_optab = lfloor_optab;
2735 fallback_fn = BUILT_IN_FLOOR;
2736 break;
2737
2738 default:
2739 gcc_unreachable ();
2740 }
2741
2742 /* Make a suitable register to place result in. */
2743 mode = TYPE_MODE (TREE_TYPE (exp));
2744
2745 target = gen_reg_rtx (mode);
2746
2747 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2748 need to expand the argument again. This way, we will not perform
2749 side-effects more than once.  */
2750 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2751
2752 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2753
2754 start_sequence ();
2755
2756 /* Compute into TARGET. */
2757 if (expand_sfix_optab (target, op0, builtin_optab))
2758 {
2759 /* Output the entire sequence. */
2760 insns = get_insns ();
2761 end_sequence ();
2762 emit_insn (insns);
2763 return target;
2764 }
2765
2766 /* If we were unable to expand via the builtin, stop the sequence
2767 (without outputting the insns). */
2768 end_sequence ();
2769
2770 /* Fall back to floating point rounding optab. */
2771 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2772
2773 /* For non-C99 targets we may end up without a fallback fndecl here
2774 if the user called __builtin_lfloor directly. In this case emit
2775 a call to the floor/ceil variants nevertheless. This should result
2776 in the best user experience for targets without full C99 support.  */
2777 if (fallback_fndecl == NULL_TREE)
2778 {
2779 tree fntype;
2780 const char *name = NULL;
2781
2782 switch (DECL_FUNCTION_CODE (fndecl))
2783 {
2784 case BUILT_IN_ICEIL:
2785 case BUILT_IN_LCEIL:
2786 case BUILT_IN_LLCEIL:
2787 name = "ceil";
2788 break;
2789 case BUILT_IN_ICEILF:
2790 case BUILT_IN_LCEILF:
2791 case BUILT_IN_LLCEILF:
2792 name = "ceilf";
2793 break;
2794 case BUILT_IN_ICEILL:
2795 case BUILT_IN_LCEILL:
2796 case BUILT_IN_LLCEILL:
2797 name = "ceill";
2798 break;
2799 case BUILT_IN_IFLOOR:
2800 case BUILT_IN_LFLOOR:
2801 case BUILT_IN_LLFLOOR:
2802 name = "floor";
2803 break;
2804 case BUILT_IN_IFLOORF:
2805 case BUILT_IN_LFLOORF:
2806 case BUILT_IN_LLFLOORF:
2807 name = "floorf";
2808 break;
2809 case BUILT_IN_IFLOORL:
2810 case BUILT_IN_LFLOORL:
2811 case BUILT_IN_LLFLOORL:
2812 name = "floorl";
2813 break;
2814 default:
2815 gcc_unreachable ();
2816 }
2817
2818 fntype = build_function_type_list (TREE_TYPE (arg),
2819 TREE_TYPE (arg), NULL_TREE);
2820 fallback_fndecl = build_fn_decl (name, fntype);
2821 }
2822
2823 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2824
2825 tmp = expand_normal (exp);
2826 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2827
2828 /* Truncate the result of floating point optab to integer
2829 via expand_fix (). */
2830 target = gen_reg_rtx (mode);
2831 expand_fix (target, tmp, 0);
2832
2833 return target;
2834 }
2835
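/* For example (a sketch): on a target without an lfloor pattern,

     long l = __builtin_lfloor (x);

   is expanded above as the equivalent of

     long l = (long) floor (x);

   i.e. a call to the floating point rounding function followed by
   expand_fix.  */
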
2836 /* Expand a call to one of the builtin math functions doing integer
2837 conversion (lrint).
2838 Return 0 if a normal call should be emitted rather than expanding the
2839 function in-line. EXP is the expression that is a call to the builtin
2840 function; if convenient, the result should be placed in TARGET. */
2841
2842 static rtx
2843 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2844 {
2845 convert_optab builtin_optab;
2846 rtx op0, insns;
2847 tree fndecl = get_callee_fndecl (exp);
2848 tree arg;
2849 enum machine_mode mode;
2850 enum built_in_function fallback_fn = BUILT_IN_NONE;
2851
2852 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2853 gcc_unreachable ();
2854
2855 arg = CALL_EXPR_ARG (exp, 0);
2856
2857 switch (DECL_FUNCTION_CODE (fndecl))
2858 {
2859 CASE_FLT_FN (BUILT_IN_IRINT):
2860 fallback_fn = BUILT_IN_LRINT;
2861 /* FALLTHRU */
2862 CASE_FLT_FN (BUILT_IN_LRINT):
2863 CASE_FLT_FN (BUILT_IN_LLRINT):
2864 builtin_optab = lrint_optab;
2865 break;
2866
2867 CASE_FLT_FN (BUILT_IN_IROUND):
2868 fallback_fn = BUILT_IN_LROUND;
2869 /* FALLTHRU */
2870 CASE_FLT_FN (BUILT_IN_LROUND):
2871 CASE_FLT_FN (BUILT_IN_LLROUND):
2872 builtin_optab = lround_optab;
2873 break;
2874
2875 default:
2876 gcc_unreachable ();
2877 }
2878
2879 /* There's no easy way to detect the case we need to set EDOM. */
2880 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2881 return NULL_RTX;
2882
2883 /* Make a suitable register to place result in. */
2884 mode = TYPE_MODE (TREE_TYPE (exp));
2885
2886 /* Expand inline unless we might need to set errno.  */
2887 if (!flag_errno_math)
2888 {
2889 rtx result = gen_reg_rtx (mode);
2890
2891 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2892 need to expand the argument again. This way, we will not perform
2893 side-effects more than once.  */
2894 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2895
2896 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2897
2898 start_sequence ();
2899
2900 if (expand_sfix_optab (result, op0, builtin_optab))
2901 {
2902 /* Output the entire sequence. */
2903 insns = get_insns ();
2904 end_sequence ();
2905 emit_insn (insns);
2906 return result;
2907 }
2908
2909 /* If we were unable to expand via the builtin, stop the sequence
2910 (without outputting the insns) and call the library function
2911 with the stabilized argument list. */
2912 end_sequence ();
2913 }
2914
2915 if (fallback_fn != BUILT_IN_NONE)
2916 {
2917 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2918 targets, (int) round (x) should never be transformed into
2919 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2920 a call to lround in the hope that the target provides at least some
2921 C99 functions. This should result in the best user experience for
2922 targets without full C99 support.  */
2923 tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
2924 fallback_fn, 0);
2925
2926 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2927 fallback_fndecl, 1, arg);
2928
2929 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2930 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2931 return convert_to_mode (mode, target, 0);
2932 }
2933
2934 return expand_call (exp, target, target == const0_rtx);
2935 }
2936
2937 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2938 a normal call should be emitted rather than expanding the function
2939 in-line. EXP is the expression that is a call to the builtin
2940 function; if convenient, the result should be placed in TARGET. */
2941
2942 static rtx
2943 expand_builtin_powi (tree exp, rtx target)
2944 {
2945 tree arg0, arg1;
2946 rtx op0, op1;
2947 enum machine_mode mode;
2948 enum machine_mode mode2;
2949
2950 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2951 return NULL_RTX;
2952
2953 arg0 = CALL_EXPR_ARG (exp, 0);
2954 arg1 = CALL_EXPR_ARG (exp, 1);
2955 mode = TYPE_MODE (TREE_TYPE (exp));
2956
2957 /* Emit a libcall to libgcc. */
2958
2959 /* Mode of the 2nd argument must match that of an int. */
2960 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2961
2962 if (target == NULL_RTX)
2963 target = gen_reg_rtx (mode);
2964
2965 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2966 if (GET_MODE (op0) != mode)
2967 op0 = convert_to_mode (mode, op0, 0);
2968 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2969 if (GET_MODE (op1) != mode2)
2970 op1 = convert_to_mode (mode2, op1, 0);
2971
2972 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2973 target, LCT_CONST, mode, 2,
2974 op0, mode, op1, mode2);
2975
2976 return target;
2977 }
2978
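/* For example (a sketch; the libgcc symbol is an assumption based
   on the usual __powi<m>2 naming):

     double r = __builtin_powi (x, 5);

   becomes a libcall along the lines of __powidf2 (x, 5), obtained
   through powi_optab's libfunc above.  */
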
2979 /* Expand expression EXP, which is a call to the strlen builtin.  Return
2980 NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
2981 try to get the result in TARGET, if convenient. */
2982
2983 static rtx
2984 expand_builtin_strlen (tree exp, rtx target,
2985 enum machine_mode target_mode)
2986 {
2987 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2988 return NULL_RTX;
2989 else
2990 {
2991 struct expand_operand ops[4];
2992 rtx pat;
2993 tree len;
2994 tree src = CALL_EXPR_ARG (exp, 0);
2995 rtx src_reg, before_strlen;
2996 enum machine_mode insn_mode = target_mode;
2997 enum insn_code icode = CODE_FOR_nothing;
2998 unsigned int align;
2999
3000 /* If the length can be computed at compile-time, return it. */
3001 len = c_strlen (src, 0);
3002 if (len)
3003 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3004
3005 /* If the length can be computed at compile-time and is a constant
3006 integer, but there are side-effects in src, evaluate
3007 src for side-effects, then return len.
3008 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3009 can be optimized into: i++; x = 3; */
3010 len = c_strlen (src, 1);
3011 if (len && TREE_CODE (len) == INTEGER_CST)
3012 {
3013 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3014 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3015 }
3016
3017 align = get_pointer_alignment (src) / BITS_PER_UNIT;
3018
3019 /* If SRC is not a pointer type, don't do this operation inline. */
3020 if (align == 0)
3021 return NULL_RTX;
3022
3023 /* Bail out if we can't compute strlen in the right mode. */
3024 while (insn_mode != VOIDmode)
3025 {
3026 icode = optab_handler (strlen_optab, insn_mode);
3027 if (icode != CODE_FOR_nothing)
3028 break;
3029
3030 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3031 }
3032 if (insn_mode == VOIDmode)
3033 return NULL_RTX;
3034
3035 /* Make a place to hold the source address. We will not expand
3036 the actual source until we are sure that the expansion will
3037 not fail -- there are trees that cannot be expanded twice. */
3038 src_reg = gen_reg_rtx (Pmode);
3039
3040 /* Mark the beginning of the strlen sequence so we can emit the
3041 source operand later. */
3042 before_strlen = get_last_insn ();
3043
3044 create_output_operand (&ops[0], target, insn_mode);
3045 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3046 create_integer_operand (&ops[2], 0);
3047 create_integer_operand (&ops[3], align);
3048 if (!maybe_expand_insn (icode, 4, ops))
3049 return NULL_RTX;
3050
3051 /* Now that we are assured of success, expand the source. */
3052 start_sequence ();
3053 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3054 if (pat != src_reg)
3055 {
3056 #ifdef POINTERS_EXTEND_UNSIGNED
3057 if (GET_MODE (pat) != Pmode)
3058 pat = convert_to_mode (Pmode, pat,
3059 POINTERS_EXTEND_UNSIGNED);
3060 #endif
3061 emit_move_insn (src_reg, pat);
3062 }
3063 pat = get_insns ();
3064 end_sequence ();
3065
3066 if (before_strlen)
3067 emit_insn_after (pat, before_strlen);
3068 else
3069 emit_insn_before (pat, get_insns ());
3070
3071 /* Return the value in the proper mode for this function. */
3072 if (GET_MODE (ops[0].value) == target_mode)
3073 target = ops[0].value;
3074 else if (target != 0)
3075 convert_move (target, ops[0].value, 0);
3076 else
3077 target = convert_to_mode (target_mode, ops[0].value, 0);
3078
3079 return target;
3080 }
3081 }
3082
3083 /* Callback routine for store_by_pieces.  Read GET_MODE_SIZE (MODE)
3084 bytes from constant string DATA + OFFSET and return it as a target
3085 constant.  */
3086
3087 static rtx
3088 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3089 enum machine_mode mode)
3090 {
3091 const char *str = (const char *) data;
3092
3093 gcc_assert (offset >= 0
3094 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3095 <= strlen (str) + 1));
3096
3097 return c_readstr (str + offset, mode);
3098 }
3099
3100 /* LEN specifies the length of the block for a memcpy/memset operation.
3101 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3102 In some cases we can make a very likely guess about the maximum size,
3103 which we then put into PROBABLE_MAX_SIZE.  */
3104
3105 static void
3106 determine_block_size (tree len, rtx len_rtx,
3107 unsigned HOST_WIDE_INT *min_size,
3108 unsigned HOST_WIDE_INT *max_size,
3109 unsigned HOST_WIDE_INT *probable_max_size)
3110 {
3111 if (CONST_INT_P (len_rtx))
3112 {
3113 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3114 return;
3115 }
3116 else
3117 {
3118 wide_int min, max;
3119 enum value_range_type range_type = VR_UNDEFINED;
3120
3121 /* Determine bounds from the type. */
3122 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3123 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3124 else
3125 *min_size = 0;
3126 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3127 *probable_max_size = *max_size
3128 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3129 else
3130 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3131
3132 if (TREE_CODE (len) == SSA_NAME)
3133 range_type = get_range_info (len, &min, &max);
3134 if (range_type == VR_RANGE)
3135 {
3136 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3137 *min_size = min.to_uhwi ();
3138 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3139 *probable_max_size = *max_size = max.to_uhwi ();
3140 }
3141 else if (range_type == VR_ANTI_RANGE)
3142 {
3143 /* An anti range 0...N lets us determine that the minimal size is N+1.  */
3144 if (min == 0)
3145 {
3146 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3147 *min_size = max.to_uhwi () + 1;
3148 }
3149 /* Code like
3150
3151 int n;
3152 if (n < 100)
3153 memcpy (a, b, n)
3154
3155 produces an anti range allowing negative values of N.  We can
3156 still use that information to guess that N is not negative.
3157 */
3158 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3159 *probable_max_size = min.to_uhwi () - 1;
3160 }
3161 }
3162 gcc_checking_assert (*max_size <=
3163 (unsigned HOST_WIDE_INT)
3164 GET_MODE_MASK (GET_MODE (len_rtx)));
3165 }
3166
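/* For example (illustrative), given

     if (n >= 8 && n <= 100)
       memcpy (dst, src, n);

   value range propagation can annotate N's SSA name with the range
   [8, 100], and the function above then reports MIN_SIZE 8 and
   MAX_SIZE/PROBABLE_MAX_SIZE 100 instead of the type's bounds.  */
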
3167 /* Expand a call EXP to the memcpy builtin.
3168 Return NULL_RTX if we failed; the caller should emit a normal call,
3169 otherwise try to get the result in TARGET, if convenient (and in
3170 mode MODE if that's convenient). */
3171
3172 static rtx
3173 expand_builtin_memcpy (tree exp, rtx target)
3174 {
3175 if (!validate_arglist (exp,
3176 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3177 return NULL_RTX;
3178 else
3179 {
3180 tree dest = CALL_EXPR_ARG (exp, 0);
3181 tree src = CALL_EXPR_ARG (exp, 1);
3182 tree len = CALL_EXPR_ARG (exp, 2);
3183 const char *src_str;
3184 unsigned int src_align = get_pointer_alignment (src);
3185 unsigned int dest_align = get_pointer_alignment (dest);
3186 rtx dest_mem, src_mem, dest_addr, len_rtx;
3187 HOST_WIDE_INT expected_size = -1;
3188 unsigned int expected_align = 0;
3189 unsigned HOST_WIDE_INT min_size;
3190 unsigned HOST_WIDE_INT max_size;
3191 unsigned HOST_WIDE_INT probable_max_size;
3192
3193 /* If DEST is not a pointer type, call the normal function. */
3194 if (dest_align == 0)
3195 return NULL_RTX;
3196
3197 /* If SRC is not a pointer type, don't do this
3198 operation in-line. */
3199 if (src_align == 0)
3200 return NULL_RTX;
3201
3202 if (currently_expanding_gimple_stmt)
3203 stringop_block_profile (currently_expanding_gimple_stmt,
3204 &expected_align, &expected_size);
3205
3206 if (expected_align < dest_align)
3207 expected_align = dest_align;
3208 dest_mem = get_memory_rtx (dest, len);
3209 set_mem_align (dest_mem, dest_align);
3210 len_rtx = expand_normal (len);
3211 determine_block_size (len, len_rtx, &min_size, &max_size,
3212 &probable_max_size);
3213 src_str = c_getstr (src);
3214
3215 /* If SRC is a string constant and block move would be done
3216 by pieces, we can avoid loading the string from memory
3217 and instead store only the computed constants.  */
3218 if (src_str
3219 && CONST_INT_P (len_rtx)
3220 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3221 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3222 CONST_CAST (char *, src_str),
3223 dest_align, false))
3224 {
3225 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3226 builtin_memcpy_read_str,
3227 CONST_CAST (char *, src_str),
3228 dest_align, false, 0);
3229 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3230 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3231 return dest_mem;
3232 }
3233
3234 src_mem = get_memory_rtx (src, len);
3235 set_mem_align (src_mem, src_align);
3236
3237 /* Copy word part most expediently. */
3238 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3239 CALL_EXPR_TAILCALL (exp)
3240 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3241 expected_align, expected_size,
3242 min_size, max_size, probable_max_size);
3243
3244 if (dest_addr == 0)
3245 {
3246 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3247 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3248 }
3249 return dest_addr;
3250 }
3251 }
3252
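/* For example (illustrative): a constant-length copy from a string
   constant such as

     memcpy (buf, "abcdefg", 8);

   takes the store-by-pieces path above and is emitted as immediate
   stores of the string's bytes (e.g. a single 8-byte store on a
   64-bit target), with no load from the constant pool and no call.  */
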
3253 /* Expand a call EXP to the mempcpy builtin.
3254 Return NULL_RTX if we failed; the caller should emit a normal call,
3255 otherwise try to get the result in TARGET, if convenient (and in
3256 mode MODE if that's convenient). If ENDP is 0 return the
3257 destination pointer, if ENDP is 1 return the end pointer ala
3258 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3259 stpcpy. */
3260
3261 static rtx
3262 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3263 {
3264 if (!validate_arglist (exp,
3265 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3266 return NULL_RTX;
3267 else
3268 {
3269 tree dest = CALL_EXPR_ARG (exp, 0);
3270 tree src = CALL_EXPR_ARG (exp, 1);
3271 tree len = CALL_EXPR_ARG (exp, 2);
3272 return expand_builtin_mempcpy_args (dest, src, len,
3273 target, mode, /*endp=*/ 1);
3274 }
3275 }
3276
3277 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3278 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3279 so that this can also be called without constructing an actual CALL_EXPR.
3280 The other arguments and return value are the same as for
3281 expand_builtin_mempcpy. */
3282
3283 static rtx
3284 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3285 rtx target, enum machine_mode mode, int endp)
3286 {
3287 /* If return value is ignored, transform mempcpy into memcpy. */
3288 if (target == const0_rtx && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3289 {
3290 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3291 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3292 dest, src, len);
3293 return expand_expr (result, target, mode, EXPAND_NORMAL);
3294 }
3295 else
3296 {
3297 const char *src_str;
3298 unsigned int src_align = get_pointer_alignment (src);
3299 unsigned int dest_align = get_pointer_alignment (dest);
3300 rtx dest_mem, src_mem, len_rtx;
3301
3302 /* If either SRC or DEST is not a pointer type, don't do this
3303 operation in-line. */
3304 if (dest_align == 0 || src_align == 0)
3305 return NULL_RTX;
3306
3307 /* If LEN is not constant, call the normal function. */
3308 if (! tree_fits_uhwi_p (len))
3309 return NULL_RTX;
3310
3311 len_rtx = expand_normal (len);
3312 src_str = c_getstr (src);
3313
3314 /* If SRC is a string constant and block move would be done
3315 by pieces, we can avoid loading the string from memory
3316 and only store the computed constants. */
3317 if (src_str
3318 && CONST_INT_P (len_rtx)
3319 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3320 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3321 CONST_CAST (char *, src_str),
3322 dest_align, false))
3323 {
3324 dest_mem = get_memory_rtx (dest, len);
3325 set_mem_align (dest_mem, dest_align);
3326 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3327 builtin_memcpy_read_str,
3328 CONST_CAST (char *, src_str),
3329 dest_align, false, endp);
3330 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3331 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3332 return dest_mem;
3333 }
3334
3335 if (CONST_INT_P (len_rtx)
3336 && can_move_by_pieces (INTVAL (len_rtx),
3337 MIN (dest_align, src_align)))
3338 {
3339 dest_mem = get_memory_rtx (dest, len);
3340 set_mem_align (dest_mem, dest_align);
3341 src_mem = get_memory_rtx (src, len);
3342 set_mem_align (src_mem, src_align);
3343 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3344 MIN (dest_align, src_align), endp);
3345 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3346 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3347 return dest_mem;
3348 }
3349
3350 return NULL_RTX;
3351 }
3352 }
3353
3354 #ifndef HAVE_movstr
3355 # define HAVE_movstr 0
3356 # define CODE_FOR_movstr CODE_FOR_nothing
3357 #endif
3358
3359 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3360 we failed; the caller should emit a normal call, otherwise try to
3361 get the result in TARGET, if convenient. If ENDP is 0 return the
3362 destination pointer, if ENDP is 1 return the end pointer ala
3363 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3364 stpcpy. */
3365
3366 static rtx
3367 expand_movstr (tree dest, tree src, rtx target, int endp)
3368 {
3369 struct expand_operand ops[3];
3370 rtx dest_mem;
3371 rtx src_mem;
3372
3373 if (!HAVE_movstr)
3374 return NULL_RTX;
3375
3376 dest_mem = get_memory_rtx (dest, NULL);
3377 src_mem = get_memory_rtx (src, NULL);
3378 if (!endp)
3379 {
3380 target = force_reg (Pmode, XEXP (dest_mem, 0));
3381 dest_mem = replace_equiv_address (dest_mem, target);
3382 }
3383
3384 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3385 create_fixed_operand (&ops[1], dest_mem);
3386 create_fixed_operand (&ops[2], src_mem);
3387 if (!maybe_expand_insn (CODE_FOR_movstr, 3, ops))
3388 return NULL_RTX;
3389
3390 if (endp && target != const0_rtx)
3391 {
3392 target = ops[0].value;
3393 /* movstr is supposed to set end to the address of the NUL
3394 terminator. If the caller requested a mempcpy-like return value,
3395 adjust it. */
3396 if (endp == 1)
3397 {
3398 rtx tem = plus_constant (GET_MODE (target),
3399 gen_lowpart (GET_MODE (target), target), 1);
3400 emit_move_insn (target, force_operand (tem, NULL_RTX));
3401 }
3402 }
3403 return target;
3404 }
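
/* [Editorial example] Why the ENDP == 1 case above adds 1: movstr is
   specified to return the address of the NUL terminator (stpcpy-style),
   while mempcpy must return one past it.  A standalone sketch assuming
   POSIX stpcpy:  */
#include <string.h>

static char *mempcpy_via_stpcpy (char *d, const char *s)
{
  char *nul = stpcpy (d, s);  /* points at the copied NUL */
  return nul + 1;             /* mempcpy-style end pointer */
}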
3405
3406 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3407 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3408 try to get the result in TARGET, if convenient (and in mode MODE if that's
3409 convenient). */
3410
3411 static rtx
3412 expand_builtin_strcpy (tree exp, rtx target)
3413 {
3414 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3415 {
3416 tree dest = CALL_EXPR_ARG (exp, 0);
3417 tree src = CALL_EXPR_ARG (exp, 1);
3418 return expand_builtin_strcpy_args (dest, src, target);
3419 }
3420 return NULL_RTX;
3421 }
3422
3423 /* Helper function to do the actual work for expand_builtin_strcpy. The
3424 arguments to the builtin_strcpy call DEST and SRC are broken out
3425 so that this can also be called without constructing an actual CALL_EXPR.
3426 The other arguments and return value are the same as for
3427 expand_builtin_strcpy. */
3428
3429 static rtx
3430 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3431 {
3432 return expand_movstr (dest, src, target, /*endp=*/0);
3433 }
3434
3435 /* Expand a call EXP to the stpcpy builtin.
3436 Return NULL_RTX if we failed; the caller should emit a normal call,
3437 otherwise try to get the result in TARGET, if convenient (and in
3438 mode MODE if that's convenient). */
3439
3440 static rtx
3441 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3442 {
3443 tree dst, src;
3444 location_t loc = EXPR_LOCATION (exp);
3445
3446 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3447 return NULL_RTX;
3448
3449 dst = CALL_EXPR_ARG (exp, 0);
3450 src = CALL_EXPR_ARG (exp, 1);
3451
3452 /* If return value is ignored, transform stpcpy into strcpy. */
3453 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3454 {
3455 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3456 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3457 return expand_expr (result, target, mode, EXPAND_NORMAL);
3458 }
3459 else
3460 {
3461 tree len, lenp1;
3462 rtx ret;
3463
3464 /* Ensure we get an actual string whose length can be evaluated at
3465 compile-time, not an expression containing a string. This is
3466 because the latter will potentially produce pessimized code
3467 when used to produce the return value. */
3468 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3469 return expand_movstr (dst, src, target, /*endp=*/2);
3470
3471 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3472 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3473 target, mode, /*endp=*/2);
3474
3475 if (ret)
3476 return ret;
3477
3478 if (TREE_CODE (len) == INTEGER_CST)
3479 {
3480 rtx len_rtx = expand_normal (len);
3481
3482 if (CONST_INT_P (len_rtx))
3483 {
3484 ret = expand_builtin_strcpy_args (dst, src, target);
3485
3486 if (ret)
3487 {
3488 if (! target)
3489 {
3490 if (mode != VOIDmode)
3491 target = gen_reg_rtx (mode);
3492 else
3493 target = gen_reg_rtx (GET_MODE (ret));
3494 }
3495 if (GET_MODE (target) != GET_MODE (ret))
3496 ret = gen_lowpart (GET_MODE (target), ret);
3497
3498 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3499 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3500 gcc_assert (ret);
3501
3502 return target;
3503 }
3504 }
3505 }
3506
3507 return expand_movstr (dst, src, target, /*endp=*/2);
3508 }
3509 }
3510
3511 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3512 bytes from constant string DATA + OFFSET and return it as target
3513 constant. */
3514
3515 rtx
3516 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3517 enum machine_mode mode)
3518 {
3519 const char *str = (const char *) data;
3520
3521 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3522 return const0_rtx;
3523
3524 return c_readstr (str + offset, mode);
3525 }
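
/* [Editorial example] Byte-level view of the callback above: offsets
   past the source string read as zero, which is what implements
   strncpy's zero padding when storing by pieces.  Illustrative only:  */
#include <string.h>

static unsigned char read_str_byte (const char *str, size_t offset)
{
  /* At offset == strlen (str) this reads the NUL itself.  */
  return offset > strlen (str) ? 0 : (unsigned char) str[offset];
}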
3526
3527 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3528 NULL_RTX if we failed; the caller should emit a normal call. */
3529
3530 static rtx
3531 expand_builtin_strncpy (tree exp, rtx target)
3532 {
3533 location_t loc = EXPR_LOCATION (exp);
3534
3535 if (validate_arglist (exp,
3536 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3537 {
3538 tree dest = CALL_EXPR_ARG (exp, 0);
3539 tree src = CALL_EXPR_ARG (exp, 1);
3540 tree len = CALL_EXPR_ARG (exp, 2);
3541 tree slen = c_strlen (src, 1);
3542
3543 /* We must be passed a constant len and src parameter. */
3544 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3545 return NULL_RTX;
3546
3547 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3548
3549 /* We're required to pad with trailing zeros if the requested
3550 len is greater than strlen(s2)+1. In that case try to
3551 use store_by_pieces; if it fails, punt. */
3552 if (tree_int_cst_lt (slen, len))
3553 {
3554 unsigned int dest_align = get_pointer_alignment (dest);
3555 const char *p = c_getstr (src);
3556 rtx dest_mem;
3557
3558 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3559 || !can_store_by_pieces (tree_to_uhwi (len),
3560 builtin_strncpy_read_str,
3561 CONST_CAST (char *, p),
3562 dest_align, false))
3563 return NULL_RTX;
3564
3565 dest_mem = get_memory_rtx (dest, len);
3566 store_by_pieces (dest_mem, tree_to_uhwi (len),
3567 builtin_strncpy_read_str,
3568 CONST_CAST (char *, p), dest_align, false, 0);
3569 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3570 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3571 return dest_mem;
3572 }
3573 }
3574 return NULL_RTX;
3575 }
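
/* [Editorial example] The padding case expanded above, in plain C:
   when LEN exceeds strlen (SRC) + 1, strncpy must zero-fill the
   remainder of the buffer.  */
#include <assert.h>
#include <string.h>

static void strncpy_pad_demo (void)
{
  char buf[8];
  strncpy (buf, "ab", sizeof buf);
  assert (buf[2] == '\0' && buf[7] == '\0');  /* tail is zero-padded */
}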
3576
3577 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3578 bytes from constant string DATA + OFFSET and return it as target
3579 constant. */
3580
3581 rtx
3582 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3583 enum machine_mode mode)
3584 {
3585 const char *c = (const char *) data;
3586 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3587
3588 memset (p, *c, GET_MODE_SIZE (mode));
3589
3590 return c_readstr (p, mode);
3591 }
3592
3593 /* Callback routine for store_by_pieces. Return the RTL of a register
3594 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3595 char value given in the RTL register data. For example, if mode is
3596 4 bytes wide, return the RTL for 0x01010101*data. */
3597
3598 static rtx
3599 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3600 enum machine_mode mode)
3601 {
3602 rtx target, coeff;
3603 size_t size;
3604 char *p;
3605
3606 size = GET_MODE_SIZE (mode);
3607 if (size == 1)
3608 return (rtx) data;
3609
3610 p = XALLOCAVEC (char, size);
3611 memset (p, 1, size);
3612 coeff = c_readstr (p, mode);
3613
3614 target = convert_to_mode (mode, (rtx) data, 1);
3615 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3616 return force_reg (mode, target);
3617 }
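
/* [Editorial example] The replication trick used above, standalone:
   multiplying a byte by 0x01...01 broadcasts it to every byte of the
   word, e.g. 0xAB * 0x01010101 == 0xABABABAB.  */
#include <stdint.h>

static uint32_t broadcast_byte (uint8_t c)
{
  return (uint32_t) c * 0x01010101u;
}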
3618
3619 /* Expand expression EXP, which is a call to the memset builtin. Return
3620 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3621 try to get the result in TARGET, if convenient (and in mode MODE if that's
3622 convenient). */
3623
3624 static rtx
3625 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3626 {
3627 if (!validate_arglist (exp,
3628 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3629 return NULL_RTX;
3630 else
3631 {
3632 tree dest = CALL_EXPR_ARG (exp, 0);
3633 tree val = CALL_EXPR_ARG (exp, 1);
3634 tree len = CALL_EXPR_ARG (exp, 2);
3635 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3636 }
3637 }
3638
3639 /* Helper function to do the actual work for expand_builtin_memset. The
3640 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3641 so that this can also be called without constructing an actual CALL_EXPR.
3642 The other arguments and return value are the same as for
3643 expand_builtin_memset. */
3644
3645 static rtx
3646 expand_builtin_memset_args (tree dest, tree val, tree len,
3647 rtx target, enum machine_mode mode, tree orig_exp)
3648 {
3649 tree fndecl, fn;
3650 enum built_in_function fcode;
3651 enum machine_mode val_mode;
3652 char c;
3653 unsigned int dest_align;
3654 rtx dest_mem, dest_addr, len_rtx;
3655 HOST_WIDE_INT expected_size = -1;
3656 unsigned int expected_align = 0;
3657 unsigned HOST_WIDE_INT min_size;
3658 unsigned HOST_WIDE_INT max_size;
3659 unsigned HOST_WIDE_INT probable_max_size;
3660
3661 dest_align = get_pointer_alignment (dest);
3662
3663 /* If DEST is not a pointer type, don't do this operation in-line. */
3664 if (dest_align == 0)
3665 return NULL_RTX;
3666
3667 if (currently_expanding_gimple_stmt)
3668 stringop_block_profile (currently_expanding_gimple_stmt,
3669 &expected_align, &expected_size);
3670
3671 if (expected_align < dest_align)
3672 expected_align = dest_align;
3673
3674 /* If the LEN parameter is zero, return DEST. */
3675 if (integer_zerop (len))
3676 {
3677 /* Evaluate and ignore VAL in case it has side-effects. */
3678 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3679 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3680 }
3681
3682 /* Stabilize the arguments in case we fail. */
3683 dest = builtin_save_expr (dest);
3684 val = builtin_save_expr (val);
3685 len = builtin_save_expr (len);
3686
3687 len_rtx = expand_normal (len);
3688 determine_block_size (len, len_rtx, &min_size, &max_size,
3689 &probable_max_size);
3690 dest_mem = get_memory_rtx (dest, len);
3691 val_mode = TYPE_MODE (unsigned_char_type_node);
3692
3693 if (TREE_CODE (val) != INTEGER_CST)
3694 {
3695 rtx val_rtx;
3696
3697 val_rtx = expand_normal (val);
3698 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3699
3700 /* Assume that we can memset by pieces if we can store
3701 the coefficients by pieces (in the required modes).
3702 We can't pass builtin_memset_gen_str as that emits RTL. */
3703 c = 1;
3704 if (tree_fits_uhwi_p (len)
3705 && can_store_by_pieces (tree_to_uhwi (len),
3706 builtin_memset_read_str, &c, dest_align,
3707 true))
3708 {
3709 val_rtx = force_reg (val_mode, val_rtx);
3710 store_by_pieces (dest_mem, tree_to_uhwi (len),
3711 builtin_memset_gen_str, val_rtx, dest_align,
3712 true, 0);
3713 }
3714 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3715 dest_align, expected_align,
3716 expected_size, min_size, max_size,
3717 probable_max_size))
3718 goto do_libcall;
3719
3720 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3721 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3722 return dest_mem;
3723 }
3724
3725 if (target_char_cast (val, &c))
3726 goto do_libcall;
3727
3728 if (c)
3729 {
3730 if (tree_fits_uhwi_p (len)
3731 && can_store_by_pieces (tree_to_uhwi (len),
3732 builtin_memset_read_str, &c, dest_align,
3733 true))
3734 store_by_pieces (dest_mem, tree_to_uhwi (len),
3735 builtin_memset_read_str, &c, dest_align, true, 0);
3736 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3737 gen_int_mode (c, val_mode),
3738 dest_align, expected_align,
3739 expected_size, min_size, max_size,
3740 probable_max_size))
3741 goto do_libcall;
3742
3743 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3744 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3745 return dest_mem;
3746 }
3747
3748 set_mem_align (dest_mem, dest_align);
3749 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3750 CALL_EXPR_TAILCALL (orig_exp)
3751 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3752 expected_align, expected_size,
3753 min_size, max_size,
3754 probable_max_size);
3755
3756 if (dest_addr == 0)
3757 {
3758 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3759 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3760 }
3761
3762 return dest_addr;
3763
3764 do_libcall:
3765 fndecl = get_callee_fndecl (orig_exp);
3766 fcode = DECL_FUNCTION_CODE (fndecl);
3767 if (fcode == BUILT_IN_MEMSET)
3768 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3769 dest, val, len);
3770 else if (fcode == BUILT_IN_BZERO)
3771 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3772 dest, len);
3773 else
3774 gcc_unreachable ();
3775 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3776 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3777 return expand_call (fn, target, target == const0_rtx);
3778 }
3779
3780 /* Expand expression EXP, which is a call to the bzero builtin. Return
3781 NULL_RTX if we failed; the caller should emit a normal call. */
3782
3783 static rtx
3784 expand_builtin_bzero (tree exp)
3785 {
3786 tree dest, size;
3787 location_t loc = EXPR_LOCATION (exp);
3788
3789 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3790 return NULL_RTX;
3791
3792 dest = CALL_EXPR_ARG (exp, 0);
3793 size = CALL_EXPR_ARG (exp, 1);
3794
3795 /* New argument list transforming bzero(ptr x, int y) to
3796 memset(ptr x, int 0, size_t y). This is done this way
3797 so that if it isn't expanded inline, we fall back to
3798 calling bzero instead of memset. */
3799
3800 return expand_builtin_memset_args (dest, integer_zero_node,
3801 fold_convert_loc (loc,
3802 size_type_node, size),
3803 const0_rtx, VOIDmode, exp);
3804 }
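
/* [Editorial example] The rewrite performed above, at source level:
   bzero (p, n) becomes memset (p, 0, (size_t) n); the original bzero
   fndecl is kept so a non-inlined expansion still calls bzero.  In
   portable user code the same substitution is:  */
#include <string.h>

static void bzero_equiv (void *p, size_t n)
{
  memset (p, 0, n);  /* bzero is legacy; memset is the portable form */
}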
3805
3806 /* Expand expression EXP, which is a call to the memcmp built-in function.
3807 Return NULL_RTX if we failed and the caller should emit a normal call,
3808 otherwise try to get the result in TARGET, if convenient (and in mode
3809 MODE, if that's convenient). */
3810
3811 static rtx
3812 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3813 ATTRIBUTE_UNUSED enum machine_mode mode)
3814 {
3815 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3816
3817 if (!validate_arglist (exp,
3818 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3819 return NULL_RTX;
3820
3821 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3822 implementing memcmp because it will stop if it encounters two
3823 zero bytes. */
3824 #if defined HAVE_cmpmemsi
3825 {
3826 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3827 rtx result;
3828 rtx insn;
3829 tree arg1 = CALL_EXPR_ARG (exp, 0);
3830 tree arg2 = CALL_EXPR_ARG (exp, 1);
3831 tree len = CALL_EXPR_ARG (exp, 2);
3832
3833 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3834 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3835 enum machine_mode insn_mode;
3836
3837 if (HAVE_cmpmemsi)
3838 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3839 else
3840 return NULL_RTX;
3841
3842 /* If either argument is not a pointer type, call the function. */
3843 if (arg1_align == 0 || arg2_align == 0)
3844 return NULL_RTX;
3845
3846 /* Make a place to write the result of the instruction. */
3847 result = target;
3848 if (! (result != 0
3849 && REG_P (result) && GET_MODE (result) == insn_mode
3850 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3851 result = gen_reg_rtx (insn_mode);
3852
3853 arg1_rtx = get_memory_rtx (arg1, len);
3854 arg2_rtx = get_memory_rtx (arg2, len);
3855 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3856
3857 /* Set MEM_SIZE as appropriate. */
3858 if (CONST_INT_P (arg3_rtx))
3859 {
3860 set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
3861 set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
3862 }
3863
3864 if (HAVE_cmpmemsi)
3865 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3866 GEN_INT (MIN (arg1_align, arg2_align)));
3867 else
3868 gcc_unreachable ();
3869
3870 if (insn)
3871 emit_insn (insn);
3872 else
3873 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
3874 TYPE_MODE (integer_type_node), 3,
3875 XEXP (arg1_rtx, 0), Pmode,
3876 XEXP (arg2_rtx, 0), Pmode,
3877 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
3878 TYPE_UNSIGNED (sizetype)),
3879 TYPE_MODE (sizetype));
3880
3881 /* Return the value in the proper mode for this function. */
3882 mode = TYPE_MODE (TREE_TYPE (exp));
3883 if (GET_MODE (result) == mode)
3884 return result;
3885 else if (target != 0)
3886 {
3887 convert_move (target, result, 0);
3888 return target;
3889 }
3890 else
3891 return convert_to_mode (mode, result, 0);
3892 }
3893 #endif /* HAVE_cmpmemsi. */
3894
3895 return NULL_RTX;
3896 }
3897
3898 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
3899 if we failed; the caller should emit a normal call, otherwise try to get
3900 the result in TARGET, if convenient. */
3901
3902 static rtx
3903 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
3904 {
3905 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3906 return NULL_RTX;
3907
3908 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
3909 if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
3910 || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
3911 {
3912 rtx arg1_rtx, arg2_rtx;
3913 rtx result, insn = NULL_RTX;
3914 tree fndecl, fn;
3915 tree arg1 = CALL_EXPR_ARG (exp, 0);
3916 tree arg2 = CALL_EXPR_ARG (exp, 1);
3917
3918 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3919 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3920
3921 /* If either argument is not a pointer type, call the function. */
3922 if (arg1_align == 0 || arg2_align == 0)
3923 return NULL_RTX;
3924
3925 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
3926 arg1 = builtin_save_expr (arg1);
3927 arg2 = builtin_save_expr (arg2);
3928
3929 arg1_rtx = get_memory_rtx (arg1, NULL);
3930 arg2_rtx = get_memory_rtx (arg2, NULL);
3931
3932 #ifdef HAVE_cmpstrsi
3933 /* Try to call cmpstrsi. */
3934 if (HAVE_cmpstrsi)
3935 {
3936 enum machine_mode insn_mode
3937 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
3938
3939 /* Make a place to write the result of the instruction. */
3940 result = target;
3941 if (! (result != 0
3942 && REG_P (result) && GET_MODE (result) == insn_mode
3943 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3944 result = gen_reg_rtx (insn_mode);
3945
3946 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
3947 GEN_INT (MIN (arg1_align, arg2_align)));
3948 }
3949 #endif
3950 #ifdef HAVE_cmpstrnsi
3951 /* Try to determine at least one length and call cmpstrnsi. */
3952 if (!insn && HAVE_cmpstrnsi)
3953 {
3954 tree len;
3955 rtx arg3_rtx;
3956
3957 enum machine_mode insn_mode
3958 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
3959 tree len1 = c_strlen (arg1, 1);
3960 tree len2 = c_strlen (arg2, 1);
3961
3962 if (len1)
3963 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3964 if (len2)
3965 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3966
3967 /* If we don't have a constant length for the first, use the length
3968 of the second, if we know it. We don't require a constant for
3969 this case; some cost analysis could be done if both are available
3970 but neither is constant. For now, assume they're equally cheap,
3971 unless one has side effects. If both strings have constant lengths,
3972 use the smaller. */
3973
3974 if (!len1)
3975 len = len2;
3976 else if (!len2)
3977 len = len1;
3978 else if (TREE_SIDE_EFFECTS (len1))
3979 len = len2;
3980 else if (TREE_SIDE_EFFECTS (len2))
3981 len = len1;
3982 else if (TREE_CODE (len1) != INTEGER_CST)
3983 len = len2;
3984 else if (TREE_CODE (len2) != INTEGER_CST)
3985 len = len1;
3986 else if (tree_int_cst_lt (len1, len2))
3987 len = len1;
3988 else
3989 len = len2;
3990
3991 /* If both arguments have side effects, we cannot optimize. */
3992 if (!len || TREE_SIDE_EFFECTS (len))
3993 goto do_libcall;
3994
3995 arg3_rtx = expand_normal (len);
3996
3997 /* Make a place to write the result of the instruction. */
3998 result = target;
3999 if (! (result != 0
4000 && REG_P (result) && GET_MODE (result) == insn_mode
4001 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4002 result = gen_reg_rtx (insn_mode);
4003
4004 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4005 GEN_INT (MIN (arg1_align, arg2_align)));
4006 }
4007 #endif
4008
4009 if (insn)
4010 {
4011 enum machine_mode mode;
4012 emit_insn (insn);
4013
4014 /* Return the value in the proper mode for this function. */
4015 mode = TYPE_MODE (TREE_TYPE (exp));
4016 if (GET_MODE (result) == mode)
4017 return result;
4018 if (target == 0)
4019 return convert_to_mode (mode, result, 0);
4020 convert_move (target, result, 0);
4021 return target;
4022 }
4023
4024 /* Expand the library call ourselves using a stabilized argument
4025 list to avoid re-evaluating the function's arguments twice. */
4026 #ifdef HAVE_cmpstrnsi
4027 do_libcall:
4028 #endif
4029 fndecl = get_callee_fndecl (exp);
4030 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4031 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4032 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4033 return expand_call (fn, target, target == const0_rtx);
4034 }
4035 #endif
4036 return NULL_RTX;
4037 }
4038
4039 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4040 NULL_RTX if we failed; the caller should emit a normal call, otherwise try to get
4041 the result in TARGET, if convenient. */
4042
4043 static rtx
4044 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4045 ATTRIBUTE_UNUSED enum machine_mode mode)
4046 {
4047 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4048
4049 if (!validate_arglist (exp,
4050 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4051 return NULL_RTX;
4052
4053 /* If c_strlen can determine an expression for one of the string
4054 lengths, and it doesn't have side effects, then emit cmpstrnsi
4055 using length MIN(strlen(string)+1, arg3). */
4056 #ifdef HAVE_cmpstrnsi
4057 if (HAVE_cmpstrnsi)
4058 {
4059 tree len, len1, len2;
4060 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4061 rtx result, insn;
4062 tree fndecl, fn;
4063 tree arg1 = CALL_EXPR_ARG (exp, 0);
4064 tree arg2 = CALL_EXPR_ARG (exp, 1);
4065 tree arg3 = CALL_EXPR_ARG (exp, 2);
4066
4067 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4068 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4069 enum machine_mode insn_mode
4070 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4071
4072 len1 = c_strlen (arg1, 1);
4073 len2 = c_strlen (arg2, 1);
4074
4075 if (len1)
4076 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4077 if (len2)
4078 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4079
4080 /* If we don't have a constant length for the first, use the length
4081 of the second, if we know it. We don't require a constant for
4082 this case; some cost analysis could be done if both are available
4083 but neither is constant. For now, assume they're equally cheap,
4084 unless one has side effects. If both strings have constant lengths,
4085 use the smaller. */
4086
4087 if (!len1)
4088 len = len2;
4089 else if (!len2)
4090 len = len1;
4091 else if (TREE_SIDE_EFFECTS (len1))
4092 len = len2;
4093 else if (TREE_SIDE_EFFECTS (len2))
4094 len = len1;
4095 else if (TREE_CODE (len1) != INTEGER_CST)
4096 len = len2;
4097 else if (TREE_CODE (len2) != INTEGER_CST)
4098 len = len1;
4099 else if (tree_int_cst_lt (len1, len2))
4100 len = len1;
4101 else
4102 len = len2;
4103
4104 /* If both arguments have side effects, we cannot optimize. */
4105 if (!len || TREE_SIDE_EFFECTS (len))
4106 return NULL_RTX;
4107
4108 /* The actual new length parameter is MIN(len,arg3). */
4109 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4110 fold_convert_loc (loc, TREE_TYPE (len), arg3));
4111
4112 /* If either argument is not a pointer type, call the function. */
4113 if (arg1_align == 0 || arg2_align == 0)
4114 return NULL_RTX;
4115
4116 /* Make a place to write the result of the instruction. */
4117 result = target;
4118 if (! (result != 0
4119 && REG_P (result) && GET_MODE (result) == insn_mode
4120 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4121 result = gen_reg_rtx (insn_mode);
4122
4123 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4124 arg1 = builtin_save_expr (arg1);
4125 arg2 = builtin_save_expr (arg2);
4126 len = builtin_save_expr (len);
4127
4128 arg1_rtx = get_memory_rtx (arg1, len);
4129 arg2_rtx = get_memory_rtx (arg2, len);
4130 arg3_rtx = expand_normal (len);
4131 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4132 GEN_INT (MIN (arg1_align, arg2_align)));
4133 if (insn)
4134 {
4135 emit_insn (insn);
4136
4137 /* Return the value in the proper mode for this function. */
4138 mode = TYPE_MODE (TREE_TYPE (exp));
4139 if (GET_MODE (result) == mode)
4140 return result;
4141 if (target == 0)
4142 return convert_to_mode (mode, result, 0);
4143 convert_move (target, result, 0);
4144 return target;
4145 }
4146
4147 /* Expand the library call ourselves using a stabilized argument
4148 list to avoid re-evaluating the function's arguments twice. */
4149 fndecl = get_callee_fndecl (exp);
4150 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4151 arg1, arg2, len);
4152 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4153 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4154 return expand_call (fn, target, target == const0_rtx);
4155 }
4156 #endif
4157 return NULL_RTX;
4158 }
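
/* [Editorial example] The length computation above, in plain C: with
   one string of known length, strncmp need compare at most
   MIN (strlen (known) + 1, n) bytes.  Illustrative helper:  */
#include <string.h>

static size_t strncmp_effective_len (const char *known, size_t n)
{
  size_t len = strlen (known) + 1;  /* include the NUL terminator */
  return len < n ? len : n;
}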
4159
4160 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4161 if that's convenient. */
4162
4163 rtx
4164 expand_builtin_saveregs (void)
4165 {
4166 rtx val, seq;
4167
4168 /* Don't do __builtin_saveregs more than once in a function.
4169 Save the result of the first call and reuse it. */
4170 if (saveregs_value != 0)
4171 return saveregs_value;
4172
4173 /* When this function is called, it means that registers must be
4174 saved on entry to this function. So we migrate the call to the
4175 first insn of this function. */
4176
4177 start_sequence ();
4178
4179 /* Do whatever the machine needs done in this case. */
4180 val = targetm.calls.expand_builtin_saveregs ();
4181
4182 seq = get_insns ();
4183 end_sequence ();
4184
4185 saveregs_value = val;
4186
4187 /* Put the insns after the NOTE that starts the function. If this
4188 is inside a start_sequence, make the outer-level insn chain current, so
4189 the code is placed at the start of the function. */
4190 push_topmost_sequence ();
4191 emit_insn_after (seq, entry_of_function ());
4192 pop_topmost_sequence ();
4193
4194 return val;
4195 }
4196
4197 /* Expand a call to __builtin_next_arg. */
4198
4199 static rtx
4200 expand_builtin_next_arg (void)
4201 {
4202 /* Argument checking is already done in fold_builtin_next_arg,
4203 which must be called before this function. */
4204 return expand_binop (ptr_mode, add_optab,
4205 crtl->args.internal_arg_pointer,
4206 crtl->args.arg_offset_rtx,
4207 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4208 }
4209
4210 /* Make it easier for the backends by protecting the valist argument
4211 from multiple evaluations. */
4212
4213 static tree
4214 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4215 {
4216 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4217
4218 /* The current way of determining the type of valist is completely
4219 bogus. We should have the information on the va builtin instead. */
4220 if (!vatype)
4221 vatype = targetm.fn_abi_va_list (cfun->decl);
4222
4223 if (TREE_CODE (vatype) == ARRAY_TYPE)
4224 {
4225 if (TREE_SIDE_EFFECTS (valist))
4226 valist = save_expr (valist);
4227
4228 /* For this case, the backends will be expecting a pointer to
4229 vatype, but it's possible we've actually been given an array
4230 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4231 So fix it. */
4232 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4233 {
4234 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4235 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4236 }
4237 }
4238 else
4239 {
4240 tree pt = build_pointer_type (vatype);
4241
4242 if (! needs_lvalue)
4243 {
4244 if (! TREE_SIDE_EFFECTS (valist))
4245 return valist;
4246
4247 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4248 TREE_SIDE_EFFECTS (valist) = 1;
4249 }
4250
4251 if (TREE_SIDE_EFFECTS (valist))
4252 valist = save_expr (valist);
4253 valist = fold_build2_loc (loc, MEM_REF,
4254 vatype, valist, build_int_cst (pt, 0));
4255 }
4256
4257 return valist;
4258 }
4259
4260 /* The "standard" definition of va_list is void*. */
4261
4262 tree
4263 std_build_builtin_va_list (void)
4264 {
4265 return ptr_type_node;
4266 }
4267
4268 /* The "standard" abi va_list is va_list_type_node. */
4269
4270 tree
4271 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4272 {
4273 return va_list_type_node;
4274 }
4275
4276 /* The "standard" type of va_list is va_list_type_node. */
4277
4278 tree
4279 std_canonical_va_list_type (tree type)
4280 {
4281 tree wtype, htype;
4282
4283 if (INDIRECT_REF_P (type))
4284 type = TREE_TYPE (type);
4285 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
4286 type = TREE_TYPE (type);
4287 wtype = va_list_type_node;
4288 htype = type;
4289 /* Treat structure va_list types. */
4290 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4291 htype = TREE_TYPE (htype);
4292 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4293 {
4294 /* If va_list is an array type, the argument may have decayed
4295 to a pointer type, e.g. by being passed to another function.
4296 In that case, unwrap both types so that we can compare the
4297 underlying records. */
4298 if (TREE_CODE (htype) == ARRAY_TYPE
4299 || POINTER_TYPE_P (htype))
4300 {
4301 wtype = TREE_TYPE (wtype);
4302 htype = TREE_TYPE (htype);
4303 }
4304 }
4305 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4306 return va_list_type_node;
4307
4308 return NULL_TREE;
4309 }
4310
4311 /* The "standard" implementation of va_start: just assign `nextarg' to
4312 the variable. */
4313
4314 void
4315 std_expand_builtin_va_start (tree valist, rtx nextarg)
4316 {
4317 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4318 convert_move (va_r, nextarg, 0);
4319 }
4320
4321 /* Expand EXP, a call to __builtin_va_start. */
4322
4323 static rtx
4324 expand_builtin_va_start (tree exp)
4325 {
4326 rtx nextarg;
4327 tree valist;
4328 location_t loc = EXPR_LOCATION (exp);
4329
4330 if (call_expr_nargs (exp) < 2)
4331 {
4332 error_at (loc, "too few arguments to function %<va_start%>");
4333 return const0_rtx;
4334 }
4335
4336 if (fold_builtin_next_arg (exp, true))
4337 return const0_rtx;
4338
4339 nextarg = expand_builtin_next_arg ();
4340 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4341
4342 if (targetm.expand_builtin_va_start)
4343 targetm.expand_builtin_va_start (valist, nextarg);
4344 else
4345 std_expand_builtin_va_start (valist, nextarg);
4346
4347 return const0_rtx;
4348 }
4349
4350 /* Expand EXP, a call to __builtin_va_end. */
4351
4352 static rtx
4353 expand_builtin_va_end (tree exp)
4354 {
4355 tree valist = CALL_EXPR_ARG (exp, 0);
4356
4357 /* Evaluate for side effects, if needed. I hate macros that don't
4358 do that. */
4359 if (TREE_SIDE_EFFECTS (valist))
4360 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4361
4362 return const0_rtx;
4363 }
4364
4365 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4366 builtin rather than just as an assignment in stdarg.h because of the
4367 nastiness of array-type va_list types. */
4368
4369 static rtx
4370 expand_builtin_va_copy (tree exp)
4371 {
4372 tree dst, src, t;
4373 location_t loc = EXPR_LOCATION (exp);
4374
4375 dst = CALL_EXPR_ARG (exp, 0);
4376 src = CALL_EXPR_ARG (exp, 1);
4377
4378 dst = stabilize_va_list_loc (loc, dst, 1);
4379 src = stabilize_va_list_loc (loc, src, 0);
4380
4381 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4382
4383 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4384 {
4385 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4386 TREE_SIDE_EFFECTS (t) = 1;
4387 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4388 }
4389 else
4390 {
4391 rtx dstb, srcb, size;
4392
4393 /* Evaluate to pointers. */
4394 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4395 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4396 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4397 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4398
4399 dstb = convert_memory_address (Pmode, dstb);
4400 srcb = convert_memory_address (Pmode, srcb);
4401
4402 /* "Dereference" to BLKmode memories. */
4403 dstb = gen_rtx_MEM (BLKmode, dstb);
4404 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4405 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4406 srcb = gen_rtx_MEM (BLKmode, srcb);
4407 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4408 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4409
4410 /* Copy. */
4411 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4412 }
4413
4414 return const0_rtx;
4415 }
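
/* [Editorial example] The two cases above, seen from user code: for a
   pointer-type va_list, va_copy is effectively an assignment; for an
   array-type va_list (e.g. on x86-64) it is a block copy of the
   underlying structure.  Either way the source-level idiom is:  */
#include <stdarg.h>

static int sum_twice (int n, ...)
{
  va_list ap, aq;
  int s = 0, i;
  va_start (ap, n);
  va_copy (aq, ap);              /* assignment or block copy, per ABI */
  for (i = 0; i < n; i++)
    s += va_arg (ap, int);
  for (i = 0; i < n; i++)
    s += va_arg (aq, int);
  va_end (aq);
  va_end (ap);
  return s;
}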
4416
4417 /* Expand a call to one of the builtin functions __builtin_frame_address or
4418 __builtin_return_address. */
4419
4420 static rtx
4421 expand_builtin_frame_address (tree fndecl, tree exp)
4422 {
4423 /* The argument must be a nonnegative integer constant.
4424 It counts the number of frames to scan up the stack.
4425 The value is the return address saved in that frame. */
4426 if (call_expr_nargs (exp) == 0)
4427 /* Warning about missing arg was already issued. */
4428 return const0_rtx;
4429 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4430 {
4431 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4432 error ("invalid argument to %<__builtin_frame_address%>");
4433 else
4434 error ("invalid argument to %<__builtin_return_address%>");
4435 return const0_rtx;
4436 }
4437 else
4438 {
4439 rtx tem
4440 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4441 tree_to_uhwi (CALL_EXPR_ARG (exp, 0)));
4442
4443 /* Some ports cannot access arbitrary stack frames. */
4444 if (tem == NULL)
4445 {
4446 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4447 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4448 else
4449 warning (0, "unsupported argument to %<__builtin_return_address%>");
4450 return const0_rtx;
4451 }
4452
4453 /* For __builtin_frame_address, return what we've got. */
4454 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4455 return tem;
4456
4457 if (!REG_P (tem)
4458 && ! CONSTANT_P (tem))
4459 tem = copy_addr_to_reg (tem);
4460 return tem;
4461 }
4462 }
4463
4464 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4465 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4466 is the same as for allocate_dynamic_stack_space. */
4467
4468 static rtx
4469 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4470 {
4471 rtx op0;
4472 rtx result;
4473 bool valid_arglist;
4474 unsigned int align;
4475 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4476 == BUILT_IN_ALLOCA_WITH_ALIGN);
4477
4478 valid_arglist
4479 = (alloca_with_align
4480 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4481 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4482
4483 if (!valid_arglist)
4484 return NULL_RTX;
4485
4486 /* Compute the argument. */
4487 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4488
4489 /* Compute the alignment. */
4490 align = (alloca_with_align
4491 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4492 : BIGGEST_ALIGNMENT);
4493
4494 /* Allocate the desired space. */
4495 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4496 result = convert_memory_address (ptr_mode, result);
4497
4498 return result;
4499 }
4500
4501 /* Expand a call to bswap builtin in EXP.
4502 Return NULL_RTX if a normal call should be emitted rather than expanding the
4503 function in-line. If convenient, the result should be placed in TARGET.
4504 SUBTARGET may be used as the target for computing one of EXP's operands. */
4505
4506 static rtx
4507 expand_builtin_bswap (enum machine_mode target_mode, tree exp, rtx target,
4508 rtx subtarget)
4509 {
4510 tree arg;
4511 rtx op0;
4512
4513 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4514 return NULL_RTX;
4515
4516 arg = CALL_EXPR_ARG (exp, 0);
4517 op0 = expand_expr (arg,
4518 subtarget && GET_MODE (subtarget) == target_mode
4519 ? subtarget : NULL_RTX,
4520 target_mode, EXPAND_NORMAL);
4521 if (GET_MODE (op0) != target_mode)
4522 op0 = convert_to_mode (target_mode, op0, 1);
4523
4524 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4525
4526 gcc_assert (target);
4527
4528 return convert_to_mode (target_mode, target, 1);
4529 }
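
/* [Editorial example] What the bswap optab computes, for the 32-bit
   case, as a portable fallback:  */
#include <stdint.h>

static uint32_t bswap32 (uint32_t x)
{
  return (x >> 24) | ((x >> 8) & 0x0000ff00u)
         | ((x << 8) & 0x00ff0000u) | (x << 24);
}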
4530
4531 /* Expand a call to a unary builtin in EXP.
4532 Return NULL_RTX if a normal call should be emitted rather than expanding the
4533 function in-line. If convenient, the result should be placed in TARGET.
4534 SUBTARGET may be used as the target for computing one of EXP's operands. */
4535
4536 static rtx
4537 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
4538 rtx subtarget, optab op_optab)
4539 {
4540 rtx op0;
4541
4542 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4543 return NULL_RTX;
4544
4545 /* Compute the argument. */
4546 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4547 (subtarget
4548 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4549 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4550 VOIDmode, EXPAND_NORMAL);
4551 /* Compute op, into TARGET if possible.
4552 Set TARGET to wherever the result comes back. */
4553 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4554 op_optab, op0, target, op_optab != clrsb_optab);
4555 gcc_assert (target);
4556
4557 return convert_to_mode (target_mode, target, 0);
4558 }
4559
4560 /* Expand a call to __builtin_expect. We just return our argument
4561 as the builtin_expect semantics should already have been applied by
4562 the tree branch prediction pass. */
4563
4564 static rtx
4565 expand_builtin_expect (tree exp, rtx target)
4566 {
4567 tree arg;
4568
4569 if (call_expr_nargs (exp) < 2)
4570 return const0_rtx;
4571 arg = CALL_EXPR_ARG (exp, 0);
4572
4573 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4574 /* When guessing was done, the hints should already have been stripped away. */
4575 gcc_assert (!flag_guess_branch_prob
4576 || optimize == 0 || seen_error ());
4577 return target;
4578 }
4579
4580 /* Expand a call to __builtin_assume_aligned. We just return our first
4581 argument, as the builtin_assume_aligned semantics should already have
4582 been applied by CCP. */
4583
4584 static rtx
4585 expand_builtin_assume_aligned (tree exp, rtx target)
4586 {
4587 if (call_expr_nargs (exp) < 2)
4588 return const0_rtx;
4589 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4590 EXPAND_NORMAL);
4591 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4592 && (call_expr_nargs (exp) < 3
4593 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4594 return target;
4595 }
4596
4597 void
4598 expand_builtin_trap (void)
4599 {
4600 #ifdef HAVE_trap
4601 if (HAVE_trap)
4602 {
4603 rtx insn = emit_insn (gen_trap ());
4604 /* For trap insns when not accumulating outgoing args force
4605 REG_ARGS_SIZE note to prevent crossjumping of calls with
4606 different args sizes. */
4607 if (!ACCUMULATE_OUTGOING_ARGS)
4608 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4609 }
4610 else
4611 #endif
4612 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4613 emit_barrier ();
4614 }
4615
4616 /* Expand a call to __builtin_unreachable. We do nothing except emit
4617 a barrier saying that control flow will not pass here.
4618
4619 It is the responsibility of the program being compiled to ensure
4620 that control flow never reaches __builtin_unreachable. */
4621 static void
4622 expand_builtin_unreachable (void)
4623 {
4624 emit_barrier ();
4625 }
4626
4627 /* Expand EXP, a call to fabs, fabsf or fabsl.
4628 Return NULL_RTX if a normal call should be emitted rather than expanding
4629 the function inline. If convenient, the result should be placed
4630 in TARGET. SUBTARGET may be used as the target for computing
4631 the operand. */
4632
4633 static rtx
4634 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4635 {
4636 enum machine_mode mode;
4637 tree arg;
4638 rtx op0;
4639
4640 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4641 return NULL_RTX;
4642
4643 arg = CALL_EXPR_ARG (exp, 0);
4644 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4645 mode = TYPE_MODE (TREE_TYPE (arg));
4646 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4647 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4648 }
4649
4650 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4651 Return NULL if a normal call should be emitted rather than expanding the
4652 function inline. If convenient, the result should be placed in TARGET.
4653 SUBTARGET may be used as the target for computing the operand. */
4654
4655 static rtx
4656 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4657 {
4658 rtx op0, op1;
4659 tree arg;
4660
4661 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4662 return NULL_RTX;
4663
4664 arg = CALL_EXPR_ARG (exp, 0);
4665 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4666
4667 arg = CALL_EXPR_ARG (exp, 1);
4668 op1 = expand_normal (arg);
4669
4670 return expand_copysign (op0, op1, target);
4671 }
4672
4673 /* Expand a call to __builtin___clear_cache. */
4674
4675 static rtx
4676 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
4677 {
4678 #ifndef HAVE_clear_cache
4679 #ifdef CLEAR_INSN_CACHE
4680 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4681 does something. Just do the default expansion to a call to
4682 __clear_cache(). */
4683 return NULL_RTX;
4684 #else
4685 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4686 does nothing. There is no need to call it. Do nothing. */
4687 return const0_rtx;
4688 #endif /* CLEAR_INSN_CACHE */
4689 #else
4690 /* We have a "clear_cache" insn, and it will handle everything. */
4691 tree begin, end;
4692 rtx begin_rtx, end_rtx;
4693
4694 /* We must not expand to a library call. If we did, any
4695 fallback library function in libgcc that might contain a call to
4696 __builtin___clear_cache() would recurse infinitely. */
4697 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4698 {
4699 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4700 return const0_rtx;
4701 }
4702
4703 if (HAVE_clear_cache)
4704 {
4705 struct expand_operand ops[2];
4706
4707 begin = CALL_EXPR_ARG (exp, 0);
4708 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4709
4710 end = CALL_EXPR_ARG (exp, 1);
4711 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4712
4713 create_address_operand (&ops[0], begin_rtx);
4714 create_address_operand (&ops[1], end_rtx);
4715 if (maybe_expand_insn (CODE_FOR_clear_cache, 2, ops))
4716 return const0_rtx;
4717 }
4718 return const0_rtx;
4719 #endif /* HAVE_clear_cache */
4720 }
4721
4722 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4723
4724 static rtx
4725 round_trampoline_addr (rtx tramp)
4726 {
4727 rtx temp, addend, mask;
4728
4729 /* If we don't need too much alignment, we'll have been guaranteed
4730 proper alignment by get_trampoline_type. */
4731 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4732 return tramp;
4733
4734 /* Round address up to desired boundary. */
4735 temp = gen_reg_rtx (Pmode);
4736 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4737 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4738
4739 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4740 temp, 0, OPTAB_LIB_WIDEN);
4741 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4742 temp, 0, OPTAB_LIB_WIDEN);
4743
4744 return tramp;
4745 }
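
/* [Editorial example] The round-up computed above, in integer form:
   (addr + align - 1) & -align for a power-of-two ALIGN; e.g. with
   align == 16, 0x1003 rounds up to 0x1010.  */
#include <stdint.h>

static uintptr_t round_up (uintptr_t addr, uintptr_t align)
{
  return (addr + align - 1) & -align;  /* align must be a power of two */
}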
4746
4747 static rtx
4748 expand_builtin_init_trampoline (tree exp, bool onstack)
4749 {
4750 tree t_tramp, t_func, t_chain;
4751 rtx m_tramp, r_tramp, r_chain, tmp;
4752
4753 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4754 POINTER_TYPE, VOID_TYPE))
4755 return NULL_RTX;
4756
4757 t_tramp = CALL_EXPR_ARG (exp, 0);
4758 t_func = CALL_EXPR_ARG (exp, 1);
4759 t_chain = CALL_EXPR_ARG (exp, 2);
4760
4761 r_tramp = expand_normal (t_tramp);
4762 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4763 MEM_NOTRAP_P (m_tramp) = 1;
4764
4765 /* If ONSTACK, the TRAMP argument should be the address of a field
4766 within the local function's FRAME decl. Either way, let's see if
4767 we can fill in the MEM_ATTRs for this memory. */
4768 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4769 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
4770
4771 /* Creator of a heap trampoline is responsible for making sure the
4772 address is aligned to at least STACK_BOUNDARY. Normally malloc
4773 will ensure this anyhow. */
4774 tmp = round_trampoline_addr (r_tramp);
4775 if (tmp != r_tramp)
4776 {
4777 m_tramp = change_address (m_tramp, BLKmode, tmp);
4778 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4779 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4780 }
4781
4782 /* The FUNC argument should be the address of the nested function.
4783 Extract the actual function decl to pass to the hook. */
4784 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4785 t_func = TREE_OPERAND (t_func, 0);
4786 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4787
4788 r_chain = expand_normal (t_chain);
4789
4790 /* Generate insns to initialize the trampoline. */
4791 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4792
4793 if (onstack)
4794 {
4795 trampolines_created = 1;
4796
4797 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4798 "trampoline generated for nested function %qD", t_func);
4799 }
4800
4801 return const0_rtx;
4802 }
4803
4804 static rtx
4805 expand_builtin_adjust_trampoline (tree exp)
4806 {
4807 rtx tramp;
4808
4809 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4810 return NULL_RTX;
4811
4812 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4813 tramp = round_trampoline_addr (tramp);
4814 if (targetm.calls.trampoline_adjust_address)
4815 tramp = targetm.calls.trampoline_adjust_address (tramp);
4816
4817 return tramp;
4818 }
4819
4820 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4821 function. The function first checks whether the back end provides
4822 an insn to implement signbit for the respective mode. If not, it
4823 checks whether the floating point format of the value is such that
4824 the sign bit can be extracted. If that is not the case, the
4825 function returns NULL_RTX to indicate that a normal call should be
4826 emitted rather than expanding the function in-line. EXP is the
4827 expression that is a call to the builtin function; if convenient,
4828 the result should be placed in TARGET. */
4829 static rtx
4830 expand_builtin_signbit (tree exp, rtx target)
4831 {
4832 const struct real_format *fmt;
4833 enum machine_mode fmode, imode, rmode;
4834 tree arg;
4835 int word, bitpos;
4836 enum insn_code icode;
4837 rtx temp;
4838 location_t loc = EXPR_LOCATION (exp);
4839
4840 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4841 return NULL_RTX;
4842
4843 arg = CALL_EXPR_ARG (exp, 0);
4844 fmode = TYPE_MODE (TREE_TYPE (arg));
4845 rmode = TYPE_MODE (TREE_TYPE (exp));
4846 fmt = REAL_MODE_FORMAT (fmode);
4847
4848 arg = builtin_save_expr (arg);
4849
4850 /* Expand the argument yielding a RTX expression. */
4851 temp = expand_normal (arg);
4852
4853 /* Check if the back end provides an insn that handles signbit for the
4854 argument's mode. */
4855 icode = optab_handler (signbit_optab, fmode);
4856 if (icode != CODE_FOR_nothing)
4857 {
4858 rtx last = get_last_insn ();
4859 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4860 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4861 return target;
4862 delete_insns_since (last);
4863 }
4864
4865 /* For floating point formats without a sign bit, implement signbit
4866 as "ARG < 0.0". */
4867 bitpos = fmt->signbit_ro;
4868 if (bitpos < 0)
4869 {
4870 /* But we can't do this if the format supports signed zero. */
4871 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
4872 return NULL_RTX;
4873
4874 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
4875 build_real (TREE_TYPE (arg), dconst0));
4876 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4877 }
4878
4879 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
4880 {
4881 imode = int_mode_for_mode (fmode);
4882 if (imode == BLKmode)
4883 return NULL_RTX;
4884 temp = gen_lowpart (imode, temp);
4885 }
4886 else
4887 {
4888 imode = word_mode;
4889 /* Handle targets with different FP word orders. */
4890 if (FLOAT_WORDS_BIG_ENDIAN)
4891 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
4892 else
4893 word = bitpos / BITS_PER_WORD;
4894 temp = operand_subword_force (temp, word, fmode);
4895 bitpos = bitpos % BITS_PER_WORD;
4896 }
4897
4898 /* Force the intermediate word_mode (or narrower) result into a
4899 register. This avoids attempting to create paradoxical SUBREGs
4900 of floating point modes below. */
4901 temp = force_reg (imode, temp);
4902
4903 /* If the bitpos is within the "result mode" lowpart, the operation
4904 can be implemented with a single bitwise AND. Otherwise, we need
4905 a right shift and an AND. */
4906
4907 if (bitpos < GET_MODE_BITSIZE (rmode))
4908 {
4909 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
4910
4911 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
4912 temp = gen_lowpart (rmode, temp);
4913 temp = expand_binop (rmode, and_optab, temp,
4914 immed_wide_int_const (mask, rmode),
4915 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4916 }
4917 else
4918 {
4919 /* Perform a logical right shift to place the signbit in the least
4920 significant bit, then truncate the result to the desired mode
4921 and mask just this bit. */
4922 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
4923 temp = gen_lowpart (rmode, temp);
4924 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
4925 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4926 }
4927
4928 return temp;
4929 }
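
/* [Editorial example] The bit extraction above for IEEE single
   precision, where the sign bit is bit 31: reinterpret, shift, mask.
   Standalone sketch, not the optab path:  */
#include <stdint.h>
#include <string.h>

static int signbit_float (float x)
{
  uint32_t bits;
  memcpy (&bits, &x, sizeof bits);  /* well-defined type pun */
  return (bits >> 31) & 1;
}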
4930
4931 /* Expand fork or exec calls. TARGET is the desired target of the
4932 call. EXP is the call. FN is the
4933 identifier of the actual function. IGNORE is nonzero if the
4934 value is to be ignored. */
4935
4936 static rtx
4937 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
4938 {
4939 tree id, decl;
4940 tree call;
4941
4942 /* If we are not profiling, just call the function. */
4943 if (!profile_arc_flag)
4944 return NULL_RTX;
4945
4946 /* Otherwise call the wrapper. This should be equivalent for the rest
4947 of the compiler, so the code does not diverge, and the wrapper may run the
4948 code necessary for keeping the profiling sane. */
4949
4950 switch (DECL_FUNCTION_CODE (fn))
4951 {
4952 case BUILT_IN_FORK:
4953 id = get_identifier ("__gcov_fork");
4954 break;
4955
4956 case BUILT_IN_EXECL:
4957 id = get_identifier ("__gcov_execl");
4958 break;
4959
4960 case BUILT_IN_EXECV:
4961 id = get_identifier ("__gcov_execv");
4962 break;
4963
4964 case BUILT_IN_EXECLP:
4965 id = get_identifier ("__gcov_execlp");
4966 break;
4967
4968 case BUILT_IN_EXECLE:
4969 id = get_identifier ("__gcov_execle");
4970 break;
4971
4972 case BUILT_IN_EXECVP:
4973 id = get_identifier ("__gcov_execvp");
4974 break;
4975
4976 case BUILT_IN_EXECVE:
4977 id = get_identifier ("__gcov_execve");
4978 break;
4979
4980 default:
4981 gcc_unreachable ();
4982 }
4983
4984 decl = build_decl (DECL_SOURCE_LOCATION (fn),
4985 FUNCTION_DECL, id, TREE_TYPE (fn));
4986 DECL_EXTERNAL (decl) = 1;
4987 TREE_PUBLIC (decl) = 1;
4988 DECL_ARTIFICIAL (decl) = 1;
4989 TREE_NOTHROW (decl) = 1;
4990 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
4991 DECL_VISIBILITY_SPECIFIED (decl) = 1;
4992 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
4993 return expand_call (call, target, ignore);
4994 }
4995
4996
4997 \f
4998 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
4999 the pointer in these functions is void*, the tree optimizers may remove
5000 casts. The mode computed in expand_builtin isn't reliable either, due
5001 to __sync_bool_compare_and_swap.
5002
5003 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5004 group of builtins. This gives us log2 of the mode size. */
5005
5006 static inline enum machine_mode
5007 get_builtin_sync_mode (int fcode_diff)
5008 {
5009 /* The size is not negotiable, so ask not to get BLKmode in return
5010 if the target indicates that a smaller size would be better. */
5011 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5012 }
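
/* [Editorial example] FCODE_DIFF as log2 of the operand size: diffs
   0..4 correspond to the _1.._16 builtins, so BITS_PER_UNIT << diff
   yields 8, 16, 32, 64 and 128 bits respectively (assuming
   BITS_PER_UNIT == 8).  */
static unsigned sync_mode_bits (int fcode_diff)
{
  return 8u << fcode_diff;
}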
5013
5014 /* Expand the memory expression LOC and return the appropriate memory operand
5015 for the builtin_sync operations. */
5016
5017 static rtx
5018 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5019 {
5020 rtx addr, mem;
5021
5022 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5023 addr = convert_memory_address (Pmode, addr);
5024
5025 /* Note that we explicitly do not want any alias information for this
5026 memory, so that we kill all other live memories. Otherwise we don't
5027 satisfy the full barrier semantics of the intrinsic. */
5028 mem = validize_mem (gen_rtx_MEM (mode, addr));
5029
5030 /* The alignment must be at least that required by the mode. */
5031 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5032 get_pointer_alignment (loc)));
5033 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5034 MEM_VOLATILE_P (mem) = 1;
5035
5036 return mem;
5037 }
5038
5039 /* Make sure an argument is in the right mode.
5040 EXP is the tree argument.
5041 MODE is the mode it should be in. */
5042
5043 static rtx
5044 expand_expr_force_mode (tree exp, enum machine_mode mode)
5045 {
5046 rtx val;
5047 enum machine_mode old_mode;
5048
5049 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5050 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5051 of CONST_INTs, where we know the old_mode only from the call argument. */
5052
5053 old_mode = GET_MODE (val);
5054 if (old_mode == VOIDmode)
5055 old_mode = TYPE_MODE (TREE_TYPE (exp));
5056 val = convert_modes (mode, old_mode, val, 1);
5057 return val;
5058 }
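
/* E.g. (illustration only): the constant argument 1 expands to
   (const_int 1), which carries VOIDmode, so OLD_MODE must be taken from
   TREE_TYPE (exp) before convert_modes is called.  */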
5059
5060
5061 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5062 EXP is the CALL_EXPR. CODE is the rtx code
5063 that corresponds to the arithmetic or logical operation from the name;
5064 an exception here is that NOT actually means NAND. TARGET is an optional
5065 place for us to store the results; AFTER is true if this is the
5066 xxx_and_fetch form, i.e. return the value after the operation. */
5067
5068 static rtx
5069 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5070 enum rtx_code code, bool after,
5071 rtx target)
5072 {
5073 rtx val, mem;
5074 location_t loc = EXPR_LOCATION (exp);
5075
5076 if (code == NOT && warn_sync_nand)
5077 {
5078 tree fndecl = get_callee_fndecl (exp);
5079 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5080
5081 static bool warned_f_a_n, warned_n_a_f;
5082
5083 switch (fcode)
5084 {
5085 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5086 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5087 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5088 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5089 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5090 if (warned_f_a_n)
5091 break;
5092
5093 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5094 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5095 warned_f_a_n = true;
5096 break;
5097
5098 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5099 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5100 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5101 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5102 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5103 if (warned_n_a_f)
5104 break;
5105
5106 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5107 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5108 warned_n_a_f = true;
5109 break;
5110
5111 default:
5112 gcc_unreachable ();
5113 }
5114 }
5115
5116 /* Expand the operands. */
5117 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5118 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5119
5120 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SEQ_CST,
5121 after);
5122 }
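
/* A sketch of the semantics being expanded (illustration only, assuming
   int-sized operands):

     old = __sync_fetch_and_add (&x, v);   AFTER == false: return old x
     new = __sync_add_and_fetch (&x, v);   AFTER == true:  return x + v

   and, per the GCC 4.4 semantics mentioned in the warning above,
   __sync_nand_and_fetch (&x, v) performs x = ~(x & v).  */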
5123
5124 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5125 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5126 true if this is the boolean form. TARGET is a place for us to store the
5127 results; this is NOT optional if IS_BOOL is true. */
5128
5129 static rtx
5130 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5131 bool is_bool, rtx target)
5132 {
5133 rtx old_val, new_val, mem;
5134 rtx *pbool, *poval;
5135
5136 /* Expand the operands. */
5137 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5138 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5139 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5140
5141 pbool = poval = NULL;
5142 if (target != const0_rtx)
5143 {
5144 if (is_bool)
5145 pbool = &target;
5146 else
5147 poval = &target;
5148 }
5149 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5150 false, MEMMODEL_SEQ_CST,
5151 MEMMODEL_SEQ_CST))
5152 return NULL_RTX;
5153
5154 return target;
5155 }
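
/* The two forms handled here are, for illustration:

     bool ok  = __sync_bool_compare_and_swap (&x, oldv, newv);
     TYPE old = __sync_val_compare_and_swap (&x, oldv, newv);

   IS_BOOL selects whether TARGET receives the success flag (PBOOL) or
   the prior value (POVAL).  */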
5156
5157 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5158 general form is actually an atomic exchange, and some targets only
5159 support a reduced form with the second argument being a constant 1.
5160 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5161 the results. */
5162
5163 static rtx
5164 expand_builtin_sync_lock_test_and_set (enum machine_mode mode, tree exp,
5165 rtx target)
5166 {
5167 rtx val, mem;
5168
5169 /* Expand the operands. */
5170 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5171 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5172
5173 return expand_sync_lock_test_and_set (target, mem, val);
5174 }
5175
5176 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5177
5178 static void
5179 expand_builtin_sync_lock_release (enum machine_mode mode, tree exp)
5180 {
5181 rtx mem;
5182
5183 /* Expand the operands. */
5184 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5185
5186 expand_atomic_store (mem, const0_rtx, MEMMODEL_RELEASE, true);
5187 }
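
/* I.e. (illustration only): __sync_lock_release (&l) becomes an atomic
   store of 0 into *l with MEMMODEL_RELEASE semantics.  */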
5188
5189 /* Given an integer representing an ``enum memmodel'', verify its
5190 correctness and return the memory model enum. */
5191
5192 static enum memmodel
5193 get_memmodel (tree exp)
5194 {
5195 rtx op;
5196 unsigned HOST_WIDE_INT val;
5197
5198 /* If the parameter is not a constant, it's a run-time value, so just
5199 treat it as MEMMODEL_SEQ_CST to avoid annoying run-time checking. */
5200 if (TREE_CODE (exp) != INTEGER_CST)
5201 return MEMMODEL_SEQ_CST;
5202
5203 op = expand_normal (exp);
5204
5205 val = INTVAL (op);
5206 if (targetm.memmodel_check)
5207 val = targetm.memmodel_check (val);
5208 else if (val & ~MEMMODEL_MASK)
5209 {
5210 warning (OPT_Winvalid_memory_model,
5211 "Unknown architecture specifier in memory model to builtin.");
5212 return MEMMODEL_SEQ_CST;
5213 }
5214
5215 if ((INTVAL (op) & MEMMODEL_MASK) >= MEMMODEL_LAST)
5216 {
5217 warning (OPT_Winvalid_memory_model,
5218 "invalid memory model argument to builtin");
5219 return MEMMODEL_SEQ_CST;
5220 }
5221
5222 return (enum memmodel) val;
5223 }
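
/* For example (a sketch): __atomic_load_n (p, __ATOMIC_ACQUIRE) arrives
   here as an INTEGER_CST and is validated, whereas

     int order = ...;
     __atomic_load_n (p, order);

   is simply treated as MEMMODEL_SEQ_CST, the strongest model. Bits above
   MEMMODEL_MASK are left to targetm.memmodel_check, e.g. target-specific
   hints such as the x86 HLE bits.  */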
5224
5225 /* Expand the __atomic_exchange intrinsic:
5226 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5227 EXP is the CALL_EXPR.
5228 TARGET is an optional place for us to store the results. */
5229
5230 static rtx
5231 expand_builtin_atomic_exchange (enum machine_mode mode, tree exp, rtx target)
5232 {
5233 rtx val, mem;
5234 enum memmodel model;
5235
5236 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5237 if ((model & MEMMODEL_MASK) == MEMMODEL_CONSUME)
5238 {
5239 error ("invalid memory model for %<__atomic_exchange%>");
5240 return NULL_RTX;
5241 }
5242
5243 if (!flag_inline_atomics)
5244 return NULL_RTX;
5245
5246 /* Expand the operands. */
5247 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5248 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5249
5250 return expand_atomic_exchange (target, mem, val, model);
5251 }
5252
5253 /* Expand the __atomic_compare_exchange intrinsic:
5254 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5255 TYPE desired, BOOL weak,
5256 enum memmodel success,
5257 enum memmodel failure)
5258 EXP is the CALL_EXPR.
5259 TARGET is an optional place for us to store the results. */
5260
5261 static rtx
5262 expand_builtin_atomic_compare_exchange (enum machine_mode mode, tree exp,
5263 rtx target)
5264 {
5265 rtx expect, desired, mem, oldval, label;
5266 enum memmodel success, failure;
5267 tree weak;
5268 bool is_weak;
5269
5270 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5271 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5272
5273 if ((failure & MEMMODEL_MASK) == MEMMODEL_RELEASE
5274 || (failure & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5275 {
5276 error ("invalid failure memory model for %<__atomic_compare_exchange%>");
5277 return NULL_RTX;
5278 }
5279
5280 if (failure > success)
5281 {
5282 error ("failure memory model cannot be stronger than success "
5283 "memory model for %<__atomic_compare_exchange%>");
5284 return NULL_RTX;
5285 }
5286
5287 if (!flag_inline_atomics)
5288 return NULL_RTX;
5289
5290 /* Expand the operands. */
5291 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5292
5293 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5294 expect = convert_memory_address (Pmode, expect);
5295 expect = gen_rtx_MEM (mode, expect);
5296 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5297
5298 weak = CALL_EXPR_ARG (exp, 3);
5299 is_weak = false;
5300 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5301 is_weak = true;
5302
5303 if (target == const0_rtx)
5304 target = NULL;
5305
5306 /* Lest the rtl backend create a race condition with an improper store
5307 to memory, always create a new pseudo for OLDVAL. */
5308 oldval = NULL;
5309
5310 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5311 is_weak, success, failure))
5312 return NULL_RTX;
5313
5314 /* Conditionally store back to EXPECT, lest we create a race condition
5315 with an improper store to memory. */
5316 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5317 the normal case where EXPECT is totally private, i.e. a register, at
5318 which point the store can be unconditional. */
5319 label = gen_label_rtx ();
5320 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL, VOIDmode, 1, label);
5321 emit_move_insn (expect, oldval);
5322 emit_label (label);
5323
5324 return target;
5325 }
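
/* The conditional write-back above implements the user-visible rule that
   a failed compare-exchange refreshes *EXPECT with the current value, so
   that (illustration only) the canonical retry loop works; the fourth
   argument 0 requests the strong variant:

     int expected = x;
     while (!__atomic_compare_exchange_n (&x, &expected, expected + 1, 0,
                                          __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST))
       ;
   */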
5326
5327 /* Expand the __atomic_load intrinsic:
5328 TYPE __atomic_load (TYPE *object, enum memmodel)
5329 EXP is the CALL_EXPR.
5330 TARGET is an optional place for us to store the results. */
5331
5332 static rtx
5333 expand_builtin_atomic_load (enum machine_mode mode, tree exp, rtx target)
5334 {
5335 rtx mem;
5336 enum memmodel model;
5337
5338 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5339 if ((model & MEMMODEL_MASK) == MEMMODEL_RELEASE
5340 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5341 {
5342 error ("invalid memory model for %<__atomic_load%>");
5343 return NULL_RTX;
5344 }
5345
5346 if (!flag_inline_atomics)
5347 return NULL_RTX;
5348
5349 /* Expand the operand. */
5350 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5351
5352 return expand_atomic_load (target, mem, model);
5353 }
5354
5355
5356 /* Expand the __atomic_store intrinsic:
5357 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5358 EXP is the CALL_EXPR. */
5360
5361 static rtx
5362 expand_builtin_atomic_store (enum machine_mode mode, tree exp)
5363 {
5364 rtx mem, val;
5365 enum memmodel model;
5366
5367 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5368 if ((model & MEMMODEL_MASK) != MEMMODEL_RELAXED
5369 && (model & MEMMODEL_MASK) != MEMMODEL_SEQ_CST
5370 && (model & MEMMODEL_MASK) != MEMMODEL_RELEASE)
5371 {
5372 error ("invalid memory model for %<__atomic_store%>");
5373 return NULL_RTX;
5374 }
5375
5376 if (!flag_inline_atomics)
5377 return NULL_RTX;
5378
5379 /* Expand the operands. */
5380 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5381 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5382
5383 return expand_atomic_store (mem, val, model, false);
5384 }
5385
5386 /* Expand the __atomic_fetch_XXX intrinsic:
5387 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5388 EXP is the CALL_EXPR.
5389 TARGET is an optional place for us to store the results.
5390 CODE is the operation: PLUS, MINUS, AND, XOR, IOR, or NOT (meaning NAND).
5391 FETCH_AFTER is true if returning the result of the operation.
5392 FETCH_AFTER is false if returning the value before the operation.
5393 IGNORE is true if the result is not used.
5394 EXT_CALL is the correct builtin for an external call if this cannot be
5395 resolved to an instruction sequence. */
5396
5397 static rtx
5398 expand_builtin_atomic_fetch_op (enum machine_mode mode, tree exp, rtx target,
5399 enum rtx_code code, bool fetch_after,
5400 bool ignore, enum built_in_function ext_call)
5401 {
5402 rtx val, mem, ret;
5403 enum memmodel model;
5404 tree fndecl;
5405 tree addr;
5406
5407 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5408
5409 /* Expand the operands. */
5410 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5411 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5412
5413 /* Only try generating instructions if inlining is turned on. */
5414 if (flag_inline_atomics)
5415 {
5416 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5417 if (ret)
5418 return ret;
5419 }
5420
5421 /* If no alternate library routine was requested, return and let the caller emit the normal library call. */
5422 if (ext_call == BUILT_IN_NONE)
5423 return NULL_RTX;
5424
5425 /* Change the call to the specified function. */
5426 fndecl = get_callee_fndecl (exp);
5427 addr = CALL_EXPR_FN (exp);
5428 STRIP_NOPS (addr);
5429
5430 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5431 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
5432
5433 /* Expand the call here so we can emit trailing code. */
5434 ret = expand_call (exp, target, ignore);
5435
5436 /* Restore the original function just in case it matters. */
5437 TREE_OPERAND (addr, 0) = fndecl;
5438
5439 /* Then issue the arithmetic correction to return the right result. */
5440 if (!ignore)
5441 {
5442 if (code == NOT)
5443 {
5444 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5445 OPTAB_LIB_WIDEN);
5446 ret = expand_simple_unop (mode, NOT, ret, target, true);
5447 }
5448 else
5449 ret = expand_simple_binop (mode, code, ret, val, target, true,
5450 OPTAB_LIB_WIDEN);
5451 }
5452 return ret;
5453 }
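
/* A worked example of the trailing correction (a sketch, not part of the
   original code): if __atomic_add_fetch_4 falls back to the external
   __atomic_fetch_add_4 routine, the call returns the old value and the
   code above emits ret = old + val to recover the post-operation result;
   for NAND (CODE == NOT) the correction is ret = ~(old & val).  */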
5454
5455
5456 #ifndef HAVE_atomic_clear
5457 # define HAVE_atomic_clear 0
5458 # define gen_atomic_clear(x,y) (gcc_unreachable (), NULL_RTX)
5459 #endif
5460
5461 /* Expand an atomic clear operation.
5462 void __atomic_clear (BOOL *obj, enum memmodel)
5463 EXP is the call expression. */
5464
5465 static rtx
5466 expand_builtin_atomic_clear (tree exp)
5467 {
5468 enum machine_mode mode;
5469 rtx mem, ret;
5470 enum memmodel model;
5471
5472 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5473 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5474 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5475
5476 if ((model & MEMMODEL_MASK) == MEMMODEL_ACQUIRE
5477 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5478 {
5479 error ("invalid memory model for %<__atomic_clear%>");
5480 return const0_rtx;
5481 }
5482
5483 if (HAVE_atomic_clear)
5484 {
5485 emit_insn (gen_atomic_clear (mem, model));
5486 return const0_rtx;
5487 }
5488
5489 /* Try issuing an __atomic_store, allowing fallback to a __sync_lock_release
5490 pattern. The only way this can fail is if the bool type is larger than a
5491 word size. Unlikely, but handle it anyway for completeness. In that case
5492 assume a single-threaded model, since there is no atomic support and no
5493 barriers are required, and emit a plain store instead. */
5494 ret = expand_atomic_store (mem, const0_rtx, model, true);
5495 if (!ret)
5496 emit_move_insn (mem, const0_rtx);
5497 return const0_rtx;
5498 }
5499
5500 /* Expand an atomic test_and_set operation.
5501 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
5502 EXP is the call expression. */
5503
5504 static rtx
5505 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5506 {
5507 rtx mem;
5508 enum memmodel model;
5509 enum machine_mode mode;
5510
5511 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5512 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5513 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5514
5515 return expand_atomic_test_and_set (target, mem, model);
5516 }
5517
5518
5519 /* Return boolean_true_node if the object of size ARG0 pointed to by ARG1 is
5520 always lock free on this architecture. If ARG1 is NULL, use the typical alignment for size ARG0. */
5521
5522 static tree
5523 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5524 {
5525 int size;
5526 enum machine_mode mode;
5527 unsigned int mode_align, type_align;
5528
5529 if (TREE_CODE (arg0) != INTEGER_CST)
5530 return NULL_TREE;
5531
5532 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5533 mode = mode_for_size (size, MODE_INT, 0);
5534 mode_align = GET_MODE_ALIGNMENT (mode);
5535
5536 if (TREE_CODE (arg1) == INTEGER_CST && INTVAL (expand_normal (arg1)) == 0)
5537 type_align = mode_align;
5538 else
5539 {
5540 tree ttype = TREE_TYPE (arg1);
5541
5542 /* This function is usually invoked and folded immediately by the front
5543 end before anything else has a chance to look at it. The pointer
5544 parameter at this point is usually cast to a void *, so check for that
5545 and look past the cast. */
5546 if (TREE_CODE (arg1) == NOP_EXPR && POINTER_TYPE_P (ttype)
5547 && VOID_TYPE_P (TREE_TYPE (ttype)))
5548 arg1 = TREE_OPERAND (arg1, 0);
5549
5550 ttype = TREE_TYPE (arg1);
5551 gcc_assert (POINTER_TYPE_P (ttype));
5552
5553 /* Get the underlying type of the object. */
5554 ttype = TREE_TYPE (ttype);
5555 type_align = TYPE_ALIGN (ttype);
5556 }
5557
5558 /* If the object has smaller alignment, the lock free routines cannot
5559 be used. */
5560 if (type_align < mode_align)
5561 return boolean_false_node;
5562
5563 /* Check if a compare_and_swap pattern exists for the mode which represents
5564 the required size. The pattern is not allowed to fail, so the existence
5565 of the pattern indicates support is present. */
5566 if (can_compare_and_swap_p (mode, true))
5567 return boolean_true_node;
5568 else
5569 return boolean_false_node;
5570 }
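
/* For example (a sketch, assuming a target with a 32-bit compare-and-swap
   pattern): __atomic_always_lock_free (4, 0) folds to true, since ARG1 == 0
   means the mode's natural alignment is assumed, while a pointer to a
   packed, underaligned 4-byte field makes it fold to false.  */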
5571
5572 /* Return const1_rtx if the parameters to call EXP represent an object which
5573 will always generate lock free instructions, and const0_rtx otherwise.
5574 The first argument is the size of the object, and the second is a pointer
5575 to the object itself. If NULL is passed for the object, then the result
5576 is based on typical alignment for an object of the specified
5577 size. */
5578
5579 static rtx
5580 expand_builtin_atomic_always_lock_free (tree exp)
5581 {
5582 tree size;
5583 tree arg0 = CALL_EXPR_ARG (exp, 0);
5584 tree arg1 = CALL_EXPR_ARG (exp, 1);
5585
5586 if (TREE_CODE (arg0) != INTEGER_CST)
5587 {
5588 error ("non-constant argument 1 to %<__atomic_always_lock_free%>");
5589 return const0_rtx;
5590 }
5591
5592 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5593 if (size == boolean_true_node)
5594 return const1_rtx;
5595 return const0_rtx;
5596 }
5597
5598 /* Return boolean_true_node if the object of size ARG0 pointed to by ARG1 is
5599 known to be lock free on this architecture; otherwise return NULL_TREE. */
5600
5601 static tree
5602 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5603 {
5604 if (!flag_inline_atomics)
5605 return NULL_TREE;
5606
5607 /* An object that is always lock free is certainly lock free; otherwise we cannot tell at compile time, so generate no result. */
5608 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5609 return boolean_true_node;
5610
5611 return NULL_TREE;
5612 }
5613
5614 /* Return const1_rtx if the parameters to call EXP represent an object which
5615 is known at compile time to always be lock free. The first argument is
5616 the size of the object, and the second is a pointer to the object
5617 itself. If NULL is passed for the object, then the result is based on
5618 typical alignment for an object of the specified size. Otherwise return
5619 NULL_RTX, so that the library routine decides at run time. */
5620
5621 static rtx
5622 expand_builtin_atomic_is_lock_free (tree exp)
5623 {
5624 tree size;
5625 tree arg0 = CALL_EXPR_ARG (exp, 0);
5626 tree arg1 = CALL_EXPR_ARG (exp, 1);
5627
5628 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5629 {
5630 error ("non-integer argument 1 to %<__atomic_is_lock_free%>");
5631 return NULL_RTX;
5632 }
5633
5634 if (!flag_inline_atomics)
5635 return NULL_RTX;
5636
5637 /* If the value is known at compile time, return the RTX for it. */
5638 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5639 if (size == boolean_true_node)
5640 return const1_rtx;
5641
5642 return NULL_RTX;
5643 }
5644
5645 /* Expand the __atomic_thread_fence intrinsic:
5646 void __atomic_thread_fence (enum memmodel)
5647 EXP is the CALL_EXPR. */
5648
5649 static void
5650 expand_builtin_atomic_thread_fence (tree exp)
5651 {
5652 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5653 expand_mem_thread_fence (model);
5654 }
5655
5656 /* Expand the __atomic_signal_fence intrinsic:
5657 void __atomic_signal_fence (enum memmodel)
5658 EXP is the CALL_EXPR. */
5659
5660 static void
5661 expand_builtin_atomic_signal_fence (tree exp)
5662 {
5663 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5664 expand_mem_signal_fence (model);
5665 }
5666
5667 /* Expand the __sync_synchronize intrinsic. */
5668
5669 static void
5670 expand_builtin_sync_synchronize (void)
5671 {
5672 expand_mem_thread_fence (MEMMODEL_SEQ_CST);
5673 }
5674
5675 static rtx
5676 expand_builtin_thread_pointer (tree exp, rtx target)
5677 {
5678 enum insn_code icode;
5679 if (!validate_arglist (exp, VOID_TYPE))
5680 return const0_rtx;
5681 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5682 if (icode != CODE_FOR_nothing)
5683 {
5684 struct expand_operand op;
5685 /* If the target is not suitable, create a new one. */
5686 if (target == NULL_RTX
5687 || !REG_P (target)
5688 || GET_MODE (target) != Pmode)
5689 target = gen_reg_rtx (Pmode);
5690 create_output_operand (&op, target, Pmode);
5691 expand_insn (icode, 1, &op);
5692 return target;
5693 }
5694 error ("%<__builtin_thread_pointer%> is not supported on this target");
5695 return const0_rtx;
5696 }
5697
5698 static void
5699 expand_builtin_set_thread_pointer (tree exp)
5700 {
5701 enum insn_code icode;
5702 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5703 return;
5704 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5705 if (icode != CODE_FOR_nothing)
5706 {
5707 struct expand_operand op;
5708 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5709 Pmode, EXPAND_NORMAL);
5710 create_input_operand (&op, val, Pmode);
5711 expand_insn (icode, 1, &op);
5712 return;
5713 }
5714 error ("%<__builtin_set_thread_pointer%> is not supported on this target");
5715 }
5716
5717 \f
5718 /* Emit code to restore the stack pointer from the value saved in VAR. */
5719
5720 static void
5721 expand_stack_restore (tree var)
5722 {
5723 rtx prev, sa = expand_normal (var);
5724
5725 sa = convert_memory_address (Pmode, sa);
5726
5727 prev = get_last_insn ();
5728 emit_stack_restore (SAVE_BLOCK, sa);
5729 fixup_args_size_notes (prev, get_last_insn (), 0);
5730 }
5731
5732
5733 /* Emit code to save the current value of the stack pointer. */
5734
5735 static rtx
5736 expand_stack_save (void)
5737 {
5738 rtx ret = NULL_RTX;
5739
5740 do_pending_stack_adjust ();
5741 emit_stack_save (SAVE_BLOCK, &ret);
5742 return ret;
5743 }
5744
5745 /* Expand an expression EXP that calls a built-in function,
5746 with result going to TARGET if that's convenient
5747 (and in mode MODE if that's convenient).
5748 SUBTARGET may be used as the target for computing one of EXP's operands.
5749 IGNORE is nonzero if the value is to be ignored. */
5750
5751 rtx
5752 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5753 int ignore)
5754 {
5755 tree fndecl = get_callee_fndecl (exp);
5756 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5757 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5758 int flags;
5759
5760 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5761 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5762
5763 /* When not optimizing, generate calls to library functions for a certain
5764 set of builtins. */
5765 if (!optimize
5766 && !called_as_built_in (fndecl)
5767 && fcode != BUILT_IN_FORK
5768 && fcode != BUILT_IN_EXECL
5769 && fcode != BUILT_IN_EXECV
5770 && fcode != BUILT_IN_EXECLP
5771 && fcode != BUILT_IN_EXECLE
5772 && fcode != BUILT_IN_EXECVP
5773 && fcode != BUILT_IN_EXECVE
5774 && fcode != BUILT_IN_ALLOCA
5775 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5776 && fcode != BUILT_IN_FREE)
5777 return expand_call (exp, target, ignore);
5778
5779 /* The built-in function expanders test for target == const0_rtx
5780 to determine whether the function's result will be ignored. */
5781 if (ignore)
5782 target = const0_rtx;
5783
5784 /* If the result of a pure or const built-in function is ignored, and
5785 none of its arguments are volatile, we can avoid expanding the
5786 built-in call and just evaluate the arguments for side-effects. */
5787 if (target == const0_rtx
5788 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5789 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5790 {
5791 bool volatilep = false;
5792 tree arg;
5793 call_expr_arg_iterator iter;
5794
5795 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5796 if (TREE_THIS_VOLATILE (arg))
5797 {
5798 volatilep = true;
5799 break;
5800 }
5801
5802 if (! volatilep)
5803 {
5804 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5805 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5806 return const0_rtx;
5807 }
5808 }
5809
5810 switch (fcode)
5811 {
5812 CASE_FLT_FN (BUILT_IN_FABS):
5813 case BUILT_IN_FABSD32:
5814 case BUILT_IN_FABSD64:
5815 case BUILT_IN_FABSD128:
5816 target = expand_builtin_fabs (exp, target, subtarget);
5817 if (target)
5818 return target;
5819 break;
5820
5821 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5822 target = expand_builtin_copysign (exp, target, subtarget);
5823 if (target)
5824 return target;
5825 break;
5826
5827 /* Just do a normal library call if we were unable to fold
5828 the values. */
5829 CASE_FLT_FN (BUILT_IN_CABS):
5830 break;
5831
5832 CASE_FLT_FN (BUILT_IN_EXP):
5833 CASE_FLT_FN (BUILT_IN_EXP10):
5834 CASE_FLT_FN (BUILT_IN_POW10):
5835 CASE_FLT_FN (BUILT_IN_EXP2):
5836 CASE_FLT_FN (BUILT_IN_EXPM1):
5837 CASE_FLT_FN (BUILT_IN_LOGB):
5838 CASE_FLT_FN (BUILT_IN_LOG):
5839 CASE_FLT_FN (BUILT_IN_LOG10):
5840 CASE_FLT_FN (BUILT_IN_LOG2):
5841 CASE_FLT_FN (BUILT_IN_LOG1P):
5842 CASE_FLT_FN (BUILT_IN_TAN):
5843 CASE_FLT_FN (BUILT_IN_ASIN):
5844 CASE_FLT_FN (BUILT_IN_ACOS):
5845 CASE_FLT_FN (BUILT_IN_ATAN):
5846 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5847 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5848 because of possible accuracy problems. */
5849 if (! flag_unsafe_math_optimizations)
5850 break;
5851 CASE_FLT_FN (BUILT_IN_SQRT):
5852 CASE_FLT_FN (BUILT_IN_FLOOR):
5853 CASE_FLT_FN (BUILT_IN_CEIL):
5854 CASE_FLT_FN (BUILT_IN_TRUNC):
5855 CASE_FLT_FN (BUILT_IN_ROUND):
5856 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5857 CASE_FLT_FN (BUILT_IN_RINT):
5858 target = expand_builtin_mathfn (exp, target, subtarget);
5859 if (target)
5860 return target;
5861 break;
5862
5863 CASE_FLT_FN (BUILT_IN_FMA):
5864 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
5865 if (target)
5866 return target;
5867 break;
5868
5869 CASE_FLT_FN (BUILT_IN_ILOGB):
5870 if (! flag_unsafe_math_optimizations)
5871 break;
5872 CASE_FLT_FN (BUILT_IN_ISINF):
5873 CASE_FLT_FN (BUILT_IN_FINITE):
5874 case BUILT_IN_ISFINITE:
5875 case BUILT_IN_ISNORMAL:
5876 target = expand_builtin_interclass_mathfn (exp, target);
5877 if (target)
5878 return target;
5879 break;
5880
5881 CASE_FLT_FN (BUILT_IN_ICEIL):
5882 CASE_FLT_FN (BUILT_IN_LCEIL):
5883 CASE_FLT_FN (BUILT_IN_LLCEIL):
5884 CASE_FLT_FN (BUILT_IN_LFLOOR):
5885 CASE_FLT_FN (BUILT_IN_IFLOOR):
5886 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5887 target = expand_builtin_int_roundingfn (exp, target);
5888 if (target)
5889 return target;
5890 break;
5891
5892 CASE_FLT_FN (BUILT_IN_IRINT):
5893 CASE_FLT_FN (BUILT_IN_LRINT):
5894 CASE_FLT_FN (BUILT_IN_LLRINT):
5895 CASE_FLT_FN (BUILT_IN_IROUND):
5896 CASE_FLT_FN (BUILT_IN_LROUND):
5897 CASE_FLT_FN (BUILT_IN_LLROUND):
5898 target = expand_builtin_int_roundingfn_2 (exp, target);
5899 if (target)
5900 return target;
5901 break;
5902
5903 CASE_FLT_FN (BUILT_IN_POWI):
5904 target = expand_builtin_powi (exp, target);
5905 if (target)
5906 return target;
5907 break;
5908
5909 CASE_FLT_FN (BUILT_IN_ATAN2):
5910 CASE_FLT_FN (BUILT_IN_LDEXP):
5911 CASE_FLT_FN (BUILT_IN_SCALB):
5912 CASE_FLT_FN (BUILT_IN_SCALBN):
5913 CASE_FLT_FN (BUILT_IN_SCALBLN):
5914 if (! flag_unsafe_math_optimizations)
5915 break;
5916
5917 CASE_FLT_FN (BUILT_IN_FMOD):
5918 CASE_FLT_FN (BUILT_IN_REMAINDER):
5919 CASE_FLT_FN (BUILT_IN_DREM):
5920 CASE_FLT_FN (BUILT_IN_POW):
5921 target = expand_builtin_mathfn_2 (exp, target, subtarget);
5922 if (target)
5923 return target;
5924 break;
5925
5926 CASE_FLT_FN (BUILT_IN_CEXPI):
5927 target = expand_builtin_cexpi (exp, target);
5928 gcc_assert (target);
5929 return target;
5930
5931 CASE_FLT_FN (BUILT_IN_SIN):
5932 CASE_FLT_FN (BUILT_IN_COS):
5933 if (! flag_unsafe_math_optimizations)
5934 break;
5935 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5936 if (target)
5937 return target;
5938 break;
5939
5940 CASE_FLT_FN (BUILT_IN_SINCOS):
5941 if (! flag_unsafe_math_optimizations)
5942 break;
5943 target = expand_builtin_sincos (exp);
5944 if (target)
5945 return target;
5946 break;
5947
5948 case BUILT_IN_APPLY_ARGS:
5949 return expand_builtin_apply_args ();
5950
5951 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5952 FUNCTION with a copy of the parameters described by
5953 ARGUMENTS, and ARGSIZE. It returns a block of memory
5954 allocated on the stack into which is stored all the registers
5955 that might possibly be used for returning the result of a
5956 function. ARGUMENTS is the value returned by
5957 __builtin_apply_args. ARGSIZE is the number of bytes of
5958 arguments that must be copied. ??? How should this value be
5959 computed? We'll also need a safe worst case value for varargs
5960 functions. */
5961 case BUILT_IN_APPLY:
5962 if (!validate_arglist (exp, POINTER_TYPE,
5963 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5964 && !validate_arglist (exp, REFERENCE_TYPE,
5965 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5966 return const0_rtx;
5967 else
5968 {
5969 rtx ops[3];
5970
5971 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
5972 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
5973 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
5974
5975 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5976 }
5977
5978 /* __builtin_return (RESULT) causes the function to return the
5979 value described by RESULT. RESULT is the address of the block of
5980 memory returned by __builtin_apply. */
5981 case BUILT_IN_RETURN:
5982 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5983 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
5984 return const0_rtx;
5985
5986 case BUILT_IN_SAVEREGS:
5987 return expand_builtin_saveregs ();
5988
5989 case BUILT_IN_VA_ARG_PACK:
5990 /* All valid uses of __builtin_va_arg_pack () are removed during
5991 inlining. */
5992 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
5993 return const0_rtx;
5994
5995 case BUILT_IN_VA_ARG_PACK_LEN:
5996 /* All valid uses of __builtin_va_arg_pack_len () are removed during
5997 inlining. */
5998 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
5999 return const0_rtx;
6000
6001 /* Return the address of the first anonymous stack arg. */
6002 case BUILT_IN_NEXT_ARG:
6003 if (fold_builtin_next_arg (exp, false))
6004 return const0_rtx;
6005 return expand_builtin_next_arg ();
6006
6007 case BUILT_IN_CLEAR_CACHE:
6008 target = expand_builtin___clear_cache (exp);
6009 if (target)
6010 return target;
6011 break;
6012
6013 case BUILT_IN_CLASSIFY_TYPE:
6014 return expand_builtin_classify_type (exp);
6015
6016 case BUILT_IN_CONSTANT_P:
6017 return const0_rtx;
6018
6019 case BUILT_IN_FRAME_ADDRESS:
6020 case BUILT_IN_RETURN_ADDRESS:
6021 return expand_builtin_frame_address (fndecl, exp);
6022
6023 /* Return the address of the area where the structure value is being
6024 returned, or 0 if it is not returned in memory. */
6025 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6026 if (call_expr_nargs (exp) != 0
6027 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6028 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6029 return const0_rtx;
6030 else
6031 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6032
6033 case BUILT_IN_ALLOCA:
6034 case BUILT_IN_ALLOCA_WITH_ALIGN:
6035 /* If the allocation stems from the declaration of a variable-sized
6036 object, it cannot accumulate. */
6037 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6038 if (target)
6039 return target;
6040 break;
6041
6042 case BUILT_IN_STACK_SAVE:
6043 return expand_stack_save ();
6044
6045 case BUILT_IN_STACK_RESTORE:
6046 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6047 return const0_rtx;
6048
6049 case BUILT_IN_BSWAP16:
6050 case BUILT_IN_BSWAP32:
6051 case BUILT_IN_BSWAP64:
6052 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6053 if (target)
6054 return target;
6055 break;
6056
6057 CASE_INT_FN (BUILT_IN_FFS):
6058 target = expand_builtin_unop (target_mode, exp, target,
6059 subtarget, ffs_optab);
6060 if (target)
6061 return target;
6062 break;
6063
6064 CASE_INT_FN (BUILT_IN_CLZ):
6065 target = expand_builtin_unop (target_mode, exp, target,
6066 subtarget, clz_optab);
6067 if (target)
6068 return target;
6069 break;
6070
6071 CASE_INT_FN (BUILT_IN_CTZ):
6072 target = expand_builtin_unop (target_mode, exp, target,
6073 subtarget, ctz_optab);
6074 if (target)
6075 return target;
6076 break;
6077
6078 CASE_INT_FN (BUILT_IN_CLRSB):
6079 target = expand_builtin_unop (target_mode, exp, target,
6080 subtarget, clrsb_optab);
6081 if (target)
6082 return target;
6083 break;
6084
6085 CASE_INT_FN (BUILT_IN_POPCOUNT):
6086 target = expand_builtin_unop (target_mode, exp, target,
6087 subtarget, popcount_optab);
6088 if (target)
6089 return target;
6090 break;
6091
6092 CASE_INT_FN (BUILT_IN_PARITY):
6093 target = expand_builtin_unop (target_mode, exp, target,
6094 subtarget, parity_optab);
6095 if (target)
6096 return target;
6097 break;
6098
6099 case BUILT_IN_STRLEN:
6100 target = expand_builtin_strlen (exp, target, target_mode);
6101 if (target)
6102 return target;
6103 break;
6104
6105 case BUILT_IN_STRCPY:
6106 target = expand_builtin_strcpy (exp, target);
6107 if (target)
6108 return target;
6109 break;
6110
6111 case BUILT_IN_STRNCPY:
6112 target = expand_builtin_strncpy (exp, target);
6113 if (target)
6114 return target;
6115 break;
6116
6117 case BUILT_IN_STPCPY:
6118 target = expand_builtin_stpcpy (exp, target, mode);
6119 if (target)
6120 return target;
6121 break;
6122
6123 case BUILT_IN_MEMCPY:
6124 target = expand_builtin_memcpy (exp, target);
6125 if (target)
6126 return target;
6127 break;
6128
6129 case BUILT_IN_MEMPCPY:
6130 target = expand_builtin_mempcpy (exp, target, mode);
6131 if (target)
6132 return target;
6133 break;
6134
6135 case BUILT_IN_MEMSET:
6136 target = expand_builtin_memset (exp, target, mode);
6137 if (target)
6138 return target;
6139 break;
6140
6141 case BUILT_IN_BZERO:
6142 target = expand_builtin_bzero (exp);
6143 if (target)
6144 return target;
6145 break;
6146
6147 case BUILT_IN_STRCMP:
6148 target = expand_builtin_strcmp (exp, target);
6149 if (target)
6150 return target;
6151 break;
6152
6153 case BUILT_IN_STRNCMP:
6154 target = expand_builtin_strncmp (exp, target, mode);
6155 if (target)
6156 return target;
6157 break;
6158
6159 case BUILT_IN_BCMP:
6160 case BUILT_IN_MEMCMP:
6161 target = expand_builtin_memcmp (exp, target, mode);
6162 if (target)
6163 return target;
6164 break;
6165
6166 case BUILT_IN_SETJMP:
6167 /* This should have been lowered to the builtins below. */
6168 gcc_unreachable ();
6169
6170 case BUILT_IN_SETJMP_SETUP:
6171 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6172 and the receiver label. */
6173 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6174 {
6175 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6176 VOIDmode, EXPAND_NORMAL);
6177 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6178 rtx label_r = label_rtx (label);
6179
6180 /* This is copied from the handling of non-local gotos. */
6181 expand_builtin_setjmp_setup (buf_addr, label_r);
6182 nonlocal_goto_handler_labels
6183 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6184 nonlocal_goto_handler_labels);
6185 /* ??? Do not let expand_label treat us as such since we would
6186 not want to be both on the list of non-local labels and on
6187 the list of forced labels. */
6188 FORCED_LABEL (label) = 0;
6189 return const0_rtx;
6190 }
6191 break;
6192
6193 case BUILT_IN_SETJMP_RECEIVER:
6194 /* __builtin_setjmp_receiver is passed the receiver label. */
6195 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6196 {
6197 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6198 rtx label_r = label_rtx (label);
6199
6200 expand_builtin_setjmp_receiver (label_r);
6201 return const0_rtx;
6202 }
6203 break;
6204
6205 /* __builtin_longjmp is passed a pointer to an array of five words.
6206 It's similar to the C library longjmp function but works with
6207 __builtin_setjmp above. */
6208 case BUILT_IN_LONGJMP:
6209 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6210 {
6211 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6212 VOIDmode, EXPAND_NORMAL);
6213 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6214
6215 if (value != const1_rtx)
6216 {
6217 error ("%<__builtin_longjmp%> second argument must be 1");
6218 return const0_rtx;
6219 }
6220
6221 expand_builtin_longjmp (buf_addr, value);
6222 return const0_rtx;
6223 }
6224 break;
6225
6226 case BUILT_IN_NONLOCAL_GOTO:
6227 target = expand_builtin_nonlocal_goto (exp);
6228 if (target)
6229 return target;
6230 break;
6231
6232 /* This updates the setjmp buffer that is its argument with the value
6233 of the current stack pointer. */
6234 case BUILT_IN_UPDATE_SETJMP_BUF:
6235 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6236 {
6237 rtx buf_addr
6238 = expand_normal (CALL_EXPR_ARG (exp, 0));
6239
6240 expand_builtin_update_setjmp_buf (buf_addr);
6241 return const0_rtx;
6242 }
6243 break;
6244
6245 case BUILT_IN_TRAP:
6246 expand_builtin_trap ();
6247 return const0_rtx;
6248
6249 case BUILT_IN_UNREACHABLE:
6250 expand_builtin_unreachable ();
6251 return const0_rtx;
6252
6253 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6254 case BUILT_IN_SIGNBITD32:
6255 case BUILT_IN_SIGNBITD64:
6256 case BUILT_IN_SIGNBITD128:
6257 target = expand_builtin_signbit (exp, target);
6258 if (target)
6259 return target;
6260 break;
6261
6262 /* Various hooks for the DWARF 2 __throw routine. */
6263 case BUILT_IN_UNWIND_INIT:
6264 expand_builtin_unwind_init ();
6265 return const0_rtx;
6266 case BUILT_IN_DWARF_CFA:
6267 return virtual_cfa_rtx;
6268 #ifdef DWARF2_UNWIND_INFO
6269 case BUILT_IN_DWARF_SP_COLUMN:
6270 return expand_builtin_dwarf_sp_column ();
6271 case BUILT_IN_INIT_DWARF_REG_SIZES:
6272 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6273 return const0_rtx;
6274 #endif
6275 case BUILT_IN_FROB_RETURN_ADDR:
6276 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6277 case BUILT_IN_EXTRACT_RETURN_ADDR:
6278 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6279 case BUILT_IN_EH_RETURN:
6280 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6281 CALL_EXPR_ARG (exp, 1));
6282 return const0_rtx;
6283 #ifdef EH_RETURN_DATA_REGNO
6284 case BUILT_IN_EH_RETURN_DATA_REGNO:
6285 return expand_builtin_eh_return_data_regno (exp);
6286 #endif
6287 case BUILT_IN_EXTEND_POINTER:
6288 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6289 case BUILT_IN_EH_POINTER:
6290 return expand_builtin_eh_pointer (exp);
6291 case BUILT_IN_EH_FILTER:
6292 return expand_builtin_eh_filter (exp);
6293 case BUILT_IN_EH_COPY_VALUES:
6294 return expand_builtin_eh_copy_values (exp);
6295
6296 case BUILT_IN_VA_START:
6297 return expand_builtin_va_start (exp);
6298 case BUILT_IN_VA_END:
6299 return expand_builtin_va_end (exp);
6300 case BUILT_IN_VA_COPY:
6301 return expand_builtin_va_copy (exp);
6302 case BUILT_IN_EXPECT:
6303 return expand_builtin_expect (exp, target);
6304 case BUILT_IN_ASSUME_ALIGNED:
6305 return expand_builtin_assume_aligned (exp, target);
6306 case BUILT_IN_PREFETCH:
6307 expand_builtin_prefetch (exp);
6308 return const0_rtx;
6309
6310 case BUILT_IN_INIT_TRAMPOLINE:
6311 return expand_builtin_init_trampoline (exp, true);
6312 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6313 return expand_builtin_init_trampoline (exp, false);
6314 case BUILT_IN_ADJUST_TRAMPOLINE:
6315 return expand_builtin_adjust_trampoline (exp);
6316
6317 case BUILT_IN_FORK:
6318 case BUILT_IN_EXECL:
6319 case BUILT_IN_EXECV:
6320 case BUILT_IN_EXECLP:
6321 case BUILT_IN_EXECLE:
6322 case BUILT_IN_EXECVP:
6323 case BUILT_IN_EXECVE:
6324 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6325 if (target)
6326 return target;
6327 break;
6328
6329 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6330 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6331 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6332 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6333 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6334 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6335 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6336 if (target)
6337 return target;
6338 break;
6339
6340 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6341 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6342 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6343 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6344 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6345 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6346 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6347 if (target)
6348 return target;
6349 break;
6350
6351 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6352 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6353 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6354 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6355 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6356 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6357 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6358 if (target)
6359 return target;
6360 break;
6361
6362 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6363 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6364 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6365 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6366 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6367 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6368 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6369 if (target)
6370 return target;
6371 break;
6372
6373 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6374 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6375 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6376 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6377 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6378 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6379 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6380 if (target)
6381 return target;
6382 break;
6383
6384 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6385 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6386 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6387 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6388 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6389 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6390 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6391 if (target)
6392 return target;
6393 break;
6394
6395 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6396 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6397 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6398 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6399 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6400 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6401 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6402 if (target)
6403 return target;
6404 break;
6405
6406 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6407 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6408 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6409 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6410 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6411 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6412 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6413 if (target)
6414 return target;
6415 break;
6416
6417 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6418 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6419 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6420 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6421 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6422 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6423 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6424 if (target)
6425 return target;
6426 break;
6427
6428 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6429 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6430 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6431 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6432 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6433 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6434 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6435 if (target)
6436 return target;
6437 break;
6438
6439 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6440 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6441 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6442 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6443 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6444 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6445 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6446 if (target)
6447 return target;
6448 break;
6449
6450 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6451 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6452 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6453 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6454 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6455 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6456 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6457 if (target)
6458 return target;
6459 break;
6460
6461 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6462 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6463 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6464 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6465 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6466 if (mode == VOIDmode)
6467 mode = TYPE_MODE (boolean_type_node);
6468 if (!target || !register_operand (target, mode))
6469 target = gen_reg_rtx (mode);
6470
6471 mode = get_builtin_sync_mode
6472 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6473 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6474 if (target)
6475 return target;
6476 break;
6477
6478 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6479 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6480 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6481 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6482 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6483 mode = get_builtin_sync_mode
6484 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6485 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6486 if (target)
6487 return target;
6488 break;
6489
6490 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6491 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6492 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6493 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6494 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6495 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6496 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6497 if (target)
6498 return target;
6499 break;
6500
6501 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6502 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6503 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6504 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6505 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6506 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6507 expand_builtin_sync_lock_release (mode, exp);
6508 return const0_rtx;
6509
6510 case BUILT_IN_SYNC_SYNCHRONIZE:
6511 expand_builtin_sync_synchronize ();
6512 return const0_rtx;
6513
6514 case BUILT_IN_ATOMIC_EXCHANGE_1:
6515 case BUILT_IN_ATOMIC_EXCHANGE_2:
6516 case BUILT_IN_ATOMIC_EXCHANGE_4:
6517 case BUILT_IN_ATOMIC_EXCHANGE_8:
6518 case BUILT_IN_ATOMIC_EXCHANGE_16:
6519 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6520 target = expand_builtin_atomic_exchange (mode, exp, target);
6521 if (target)
6522 return target;
6523 break;
6524
6525 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6526 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6527 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6528 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6529 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6530 {
6531 unsigned int nargs, z;
6532 vec<tree, va_gc> *vec;
6533
6534 mode =
6535 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6536 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6537 if (target)
6538 return target;
6539
6540 /* If this is turned into an external library call, the weak parameter
6541 must be dropped to match the expected parameter list. */
6542 nargs = call_expr_nargs (exp);
6543 vec_alloc (vec, nargs - 1);
6544 for (z = 0; z < 3; z++)
6545 vec->quick_push (CALL_EXPR_ARG (exp, z));
6546 /* Skip the boolean weak parameter. */
6547 for (z = 4; z < 6; z++)
6548 vec->quick_push (CALL_EXPR_ARG (exp, z));
6549 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6550 break;
6551 }
6552
6553 case BUILT_IN_ATOMIC_LOAD_1:
6554 case BUILT_IN_ATOMIC_LOAD_2:
6555 case BUILT_IN_ATOMIC_LOAD_4:
6556 case BUILT_IN_ATOMIC_LOAD_8:
6557 case BUILT_IN_ATOMIC_LOAD_16:
6558 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6559 target = expand_builtin_atomic_load (mode, exp, target);
6560 if (target)
6561 return target;
6562 break;
6563
6564 case BUILT_IN_ATOMIC_STORE_1:
6565 case BUILT_IN_ATOMIC_STORE_2:
6566 case BUILT_IN_ATOMIC_STORE_4:
6567 case BUILT_IN_ATOMIC_STORE_8:
6568 case BUILT_IN_ATOMIC_STORE_16:
6569 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6570 target = expand_builtin_atomic_store (mode, exp);
6571 if (target)
6572 return const0_rtx;
6573 break;
6574
6575 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6576 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6577 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6578 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6579 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6580 {
6581 enum built_in_function lib;
6582 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6583 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6584 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6585 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6586 ignore, lib);
6587 if (target)
6588 return target;
6589 break;
6590 }
6591 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6592 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6593 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6594 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6595 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6596 {
6597 enum built_in_function lib;
6598 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6599 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6600 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6601 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6602 ignore, lib);
6603 if (target)
6604 return target;
6605 break;
6606 }
6607 case BUILT_IN_ATOMIC_AND_FETCH_1:
6608 case BUILT_IN_ATOMIC_AND_FETCH_2:
6609 case BUILT_IN_ATOMIC_AND_FETCH_4:
6610 case BUILT_IN_ATOMIC_AND_FETCH_8:
6611 case BUILT_IN_ATOMIC_AND_FETCH_16:
6612 {
6613 enum built_in_function lib;
6614 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6615 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6616 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6617 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6618 ignore, lib);
6619 if (target)
6620 return target;
6621 break;
6622 }
6623 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6624 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6625 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6626 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6627 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6628 {
6629 enum built_in_function lib;
6630 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6631 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6632 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6633 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6634 ignore, lib);
6635 if (target)
6636 return target;
6637 break;
6638 }
6639 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6640 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6641 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6642 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6643 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6644 {
6645 enum built_in_function lib;
6646 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6647 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6648 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6649 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6650 ignore, lib);
6651 if (target)
6652 return target;
6653 break;
6654 }
6655 case BUILT_IN_ATOMIC_OR_FETCH_1:
6656 case BUILT_IN_ATOMIC_OR_FETCH_2:
6657 case BUILT_IN_ATOMIC_OR_FETCH_4:
6658 case BUILT_IN_ATOMIC_OR_FETCH_8:
6659 case BUILT_IN_ATOMIC_OR_FETCH_16:
6660 {
6661 enum built_in_function lib;
6662 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6663 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6664 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6665 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6666 ignore, lib);
6667 if (target)
6668 return target;
6669 break;
6670 }
6671 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6672 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6673 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6674 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6675 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6676 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6677 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6678 ignore, BUILT_IN_NONE);
6679 if (target)
6680 return target;
6681 break;
6682
6683 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6684 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6685 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6686 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6687 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6688 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6689 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6690 ignore, BUILT_IN_NONE);
6691 if (target)
6692 return target;
6693 break;
6694
6695 case BUILT_IN_ATOMIC_FETCH_AND_1:
6696 case BUILT_IN_ATOMIC_FETCH_AND_2:
6697 case BUILT_IN_ATOMIC_FETCH_AND_4:
6698 case BUILT_IN_ATOMIC_FETCH_AND_8:
6699 case BUILT_IN_ATOMIC_FETCH_AND_16:
6700 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6701 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6702 ignore, BUILT_IN_NONE);
6703 if (target)
6704 return target;
6705 break;
6706
6707 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6708 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6709 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6710 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6711 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6712 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6713 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6714 ignore, BUILT_IN_NONE);
6715 if (target)
6716 return target;
6717 break;
6718
6719 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6720 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6721 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6722 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6723 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6724 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6725 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6726 ignore, BUILT_IN_NONE);
6727 if (target)
6728 return target;
6729 break;
6730
6731 case BUILT_IN_ATOMIC_FETCH_OR_1:
6732 case BUILT_IN_ATOMIC_FETCH_OR_2:
6733 case BUILT_IN_ATOMIC_FETCH_OR_4:
6734 case BUILT_IN_ATOMIC_FETCH_OR_8:
6735 case BUILT_IN_ATOMIC_FETCH_OR_16:
6736 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6737 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6738 ignore, BUILT_IN_NONE);
6739 if (target)
6740 return target;
6741 break;
6742
6743 case BUILT_IN_ATOMIC_TEST_AND_SET:
6744 return expand_builtin_atomic_test_and_set (exp, target);
6745
6746 case BUILT_IN_ATOMIC_CLEAR:
6747 return expand_builtin_atomic_clear (exp);
6748
6749 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6750 return expand_builtin_atomic_always_lock_free (exp);
6751
6752 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6753 target = expand_builtin_atomic_is_lock_free (exp);
6754 if (target)
6755 return target;
6756 break;
6757
6758 case BUILT_IN_ATOMIC_THREAD_FENCE:
6759 expand_builtin_atomic_thread_fence (exp);
6760 return const0_rtx;
6761
6762 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6763 expand_builtin_atomic_signal_fence (exp);
6764 return const0_rtx;
6765
6766 case BUILT_IN_OBJECT_SIZE:
6767 return expand_builtin_object_size (exp);
6768
6769 case BUILT_IN_MEMCPY_CHK:
6770 case BUILT_IN_MEMPCPY_CHK:
6771 case BUILT_IN_MEMMOVE_CHK:
6772 case BUILT_IN_MEMSET_CHK:
6773 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6774 if (target)
6775 return target;
6776 break;
6777
6778 case BUILT_IN_STRCPY_CHK:
6779 case BUILT_IN_STPCPY_CHK:
6780 case BUILT_IN_STRNCPY_CHK:
6781 case BUILT_IN_STPNCPY_CHK:
6782 case BUILT_IN_STRCAT_CHK:
6783 case BUILT_IN_STRNCAT_CHK:
6784 case BUILT_IN_SNPRINTF_CHK:
6785 case BUILT_IN_VSNPRINTF_CHK:
6786 maybe_emit_chk_warning (exp, fcode);
6787 break;
6788
6789 case BUILT_IN_SPRINTF_CHK:
6790 case BUILT_IN_VSPRINTF_CHK:
6791 maybe_emit_sprintf_chk_warning (exp, fcode);
6792 break;
6793
6794 case BUILT_IN_FREE:
6795 if (warn_free_nonheap_object)
6796 maybe_emit_free_warning (exp);
6797 break;
6798
6799 case BUILT_IN_THREAD_POINTER:
6800 return expand_builtin_thread_pointer (exp, target);
6801
6802 case BUILT_IN_SET_THREAD_POINTER:
6803 expand_builtin_set_thread_pointer (exp);
6804 return const0_rtx;
6805
6806 case BUILT_IN_CILK_DETACH:
6807 expand_builtin_cilk_detach (exp);
6808 return const0_rtx;
6809
6810 case BUILT_IN_CILK_POP_FRAME:
6811 expand_builtin_cilk_pop_frame (exp);
6812 return const0_rtx;
6813
6814 default: /* Just do a normal library call if the builtin is unknown.  */
6815 break;
6816 }
6817
6818 /* The switch statement above can drop through to cause the function
6819 to be called normally. */
6820 return expand_call (exp, target, ignore);
6821 }
6822
6823 /* Determine whether a tree node represents a call to a built-in
6824 function. If the tree T is a call to a built-in function with
6825 the right number of arguments of the appropriate types, return
6826 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6827 Otherwise the return value is END_BUILTINS. */
6828
6829 enum built_in_function
6830 builtin_mathfn_code (const_tree t)
6831 {
6832 const_tree fndecl, arg, parmlist;
6833 const_tree argtype, parmtype;
6834 const_call_expr_arg_iterator iter;
6835
6836 if (TREE_CODE (t) != CALL_EXPR
6837 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6838 return END_BUILTINS;
6839
6840 fndecl = get_callee_fndecl (t);
6841 if (fndecl == NULL_TREE
6842 || TREE_CODE (fndecl) != FUNCTION_DECL
6843 || ! DECL_BUILT_IN (fndecl)
6844 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6845 return END_BUILTINS;
6846
6847 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6848 init_const_call_expr_arg_iterator (t, &iter);
6849 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6850 {
6851 /* If a function doesn't take a variable number of arguments,
6852 the last element in the list will have type `void'. */
6853 parmtype = TREE_VALUE (parmlist);
6854 if (VOID_TYPE_P (parmtype))
6855 {
6856 if (more_const_call_expr_args_p (&iter))
6857 return END_BUILTINS;
6858 return DECL_FUNCTION_CODE (fndecl);
6859 }
6860
6861 if (! more_const_call_expr_args_p (&iter))
6862 return END_BUILTINS;
6863
6864 arg = next_const_call_expr_arg (&iter);
6865 argtype = TREE_TYPE (arg);
6866
6867 if (SCALAR_FLOAT_TYPE_P (parmtype))
6868 {
6869 if (! SCALAR_FLOAT_TYPE_P (argtype))
6870 return END_BUILTINS;
6871 }
6872 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6873 {
6874 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6875 return END_BUILTINS;
6876 }
6877 else if (POINTER_TYPE_P (parmtype))
6878 {
6879 if (! POINTER_TYPE_P (argtype))
6880 return END_BUILTINS;
6881 }
6882 else if (INTEGRAL_TYPE_P (parmtype))
6883 {
6884 if (! INTEGRAL_TYPE_P (argtype))
6885 return END_BUILTINS;
6886 }
6887 else
6888 return END_BUILTINS;
6889 }
6890
6891 /* Variable-length argument list. */
6892 return DECL_FUNCTION_CODE (fndecl);
6893 }
6894
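/* Illustrative sketch, not part of GCC: a typical use of
   builtin_mathfn_code, recognizing sqrt calls of any float width
   during folding.  The helper name `is_sqrt_call' is hypothetical.  */

static bool
is_sqrt_call (const_tree t)
{
  switch (builtin_mathfn_code (t))
    {
    CASE_FLT_FN (BUILT_IN_SQRT):
      return true;	/* sqrtf, sqrt or sqrtl with matching arguments.  */
    default:
      return false;	/* Not a recognized math builtin call.  */
    }
}
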
6895 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6896 evaluate to a constant. */
6897
6898 static tree
6899 fold_builtin_constant_p (tree arg)
6900 {
6901 /* We return 1 for a numeric type that's known to be a constant
6902 value at compile-time or for an aggregate type that's a
6903 literal constant. */
6904 STRIP_NOPS (arg);
6905
6906 /* If we know this is a constant, return the constant one.  */
6907 if (CONSTANT_CLASS_P (arg)
6908 || (TREE_CODE (arg) == CONSTRUCTOR
6909 && TREE_CONSTANT (arg)))
6910 return integer_one_node;
6911 if (TREE_CODE (arg) == ADDR_EXPR)
6912 {
6913 tree op = TREE_OPERAND (arg, 0);
6914 if (TREE_CODE (op) == STRING_CST
6915 || (TREE_CODE (op) == ARRAY_REF
6916 && integer_zerop (TREE_OPERAND (op, 1))
6917 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6918 return integer_one_node;
6919 }
6920
6921 /* If this expression has side effects, show we don't know it to be a
6922 constant. Likewise if it's a pointer or aggregate type since in
6923 those cases we only want literals, since those are only optimized
6924 when generating RTL, not later.
6925 And finally, if we are compiling an initializer, not code, we
6926 need to return a definite result now; there's not going to be any
6927 more optimization done. */
6928 if (TREE_SIDE_EFFECTS (arg)
6929 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
6930 || POINTER_TYPE_P (TREE_TYPE (arg))
6931 || cfun == 0
6932 || folding_initializer
6933 || force_folding_builtin_constant_p)
6934 return integer_zero_node;
6935
6936 return NULL_TREE;
6937 }
6938
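/* Illustrative sketch, not part of GCC: how the folding above behaves
   at the source level.  The function name `bcp_demo' is hypothetical.  */

static int
bcp_demo (int x)
{
  int a = __builtin_constant_p (42);	/* CONSTANT_CLASS_P: folds to 1.  */
  int b = __builtin_constant_p ("abc");	/* ADDR_EXPR of a STRING_CST: 1.  */
  int c = __builtin_constant_p (x);	/* Not obviously constant: folding
					   is deferred (NULL_TREE) until
					   RTL, unless we are in an
					   initializer, which forces 0.  */
  return a + b + c;
}
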
6939 /* Create builtin_expect with PRED, EXPECTED and PREDICTOR as its
6940 arguments and return it as a truthvalue.  */
6941
6942 static tree
6943 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
6944 tree predictor)
6945 {
6946 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
6947
6948 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
6949 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
6950 ret_type = TREE_TYPE (TREE_TYPE (fn));
6951 pred_type = TREE_VALUE (arg_types);
6952 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
6953
6954 pred = fold_convert_loc (loc, pred_type, pred);
6955 expected = fold_convert_loc (loc, expected_type, expected);
6956 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
6957 predictor);
6958
6959 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
6960 build_int_cst (ret_type, 0));
6961 }
6962
6963 /* Fold a call to builtin_expect with arguments ARG0, ARG1 and ARG2.  Return
6964 NULL_TREE if no simplification is possible. */
6965
6966 tree
6967 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
6968 {
6969 tree inner, fndecl, inner_arg0;
6970 enum tree_code code;
6971
6972 /* Distribute the expected value over short-circuiting operators.
6973 See through the cast from truthvalue_type_node to long. */
6974 inner_arg0 = arg0;
6975 while (TREE_CODE (inner_arg0) == NOP_EXPR
6976 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
6977 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
6978 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
6979
6980 /* If this is a builtin_expect within a builtin_expect keep the
6981 inner one. See through a comparison against a constant. It
6982 might have been added to create a truthvalue.  */
6983 inner = inner_arg0;
6984
6985 if (COMPARISON_CLASS_P (inner)
6986 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
6987 inner = TREE_OPERAND (inner, 0);
6988
6989 if (TREE_CODE (inner) == CALL_EXPR
6990 && (fndecl = get_callee_fndecl (inner))
6991 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
6992 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
6993 return arg0;
6994
6995 inner = inner_arg0;
6996 code = TREE_CODE (inner);
6997 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6998 {
6999 tree op0 = TREE_OPERAND (inner, 0);
7000 tree op1 = TREE_OPERAND (inner, 1);
7001
7002 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7003 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
7004 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7005
7006 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7007 }
7008
7009 /* If the argument isn't invariant then there's nothing else we can do. */
7010 if (!TREE_CONSTANT (inner_arg0))
7011 return NULL_TREE;
7012
7013 /* If we expect that a comparison against the argument will fold to
7014 a constant, return the constant.  In practice, this means a true
7015 constant or the address of a non-weak symbol. */
7016 inner = inner_arg0;
7017 STRIP_NOPS (inner);
7018 if (TREE_CODE (inner) == ADDR_EXPR)
7019 {
7020 do
7021 {
7022 inner = TREE_OPERAND (inner, 0);
7023 }
7024 while (TREE_CODE (inner) == COMPONENT_REF
7025 || TREE_CODE (inner) == ARRAY_REF);
7026 if ((TREE_CODE (inner) == VAR_DECL
7027 || TREE_CODE (inner) == FUNCTION_DECL)
7028 && DECL_WEAK (inner))
7029 return NULL_TREE;
7030 }
7031
7032 /* Otherwise, ARG0 already has the proper type for the return value. */
7033 return arg0;
7034 }
7035
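/* Illustrative sketch, not part of GCC, of the distribution above over
   TRUTH_ANDIF_EXPR; `expect_demo' is a hypothetical example.  */

static long
expect_demo (int a, int b)
{
  /* The call below is folded as if it had been written
     (__builtin_expect (a > 0, 1) && __builtin_expect (b > 0, 1)).  */
  return __builtin_expect (a > 0 && b > 0, 1);
}
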
7036 /* Fold a call to __builtin_classify_type with argument ARG. */
7037
7038 static tree
7039 fold_builtin_classify_type (tree arg)
7040 {
7041 if (arg == 0)
7042 return build_int_cst (integer_type_node, no_type_class);
7043
7044 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7045 }
7046
7047 /* Fold a call to __builtin_strlen with argument ARG. */
7048
7049 static tree
7050 fold_builtin_strlen (location_t loc, tree type, tree arg)
7051 {
7052 if (!validate_arg (arg, POINTER_TYPE))
7053 return NULL_TREE;
7054 else
7055 {
7056 tree len = c_strlen (arg, 0);
7057
7058 if (len)
7059 return fold_convert_loc (loc, type, len);
7060
7061 return NULL_TREE;
7062 }
7063 }
7064
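/* Illustrative sketch, not part of GCC: with a constant string the
   c_strlen call above computes the length at compile time, so no libc
   call survives.  `strlen_demo' is hypothetical.  */

static __SIZE_TYPE__
strlen_demo (void)
{
  return __builtin_strlen ("hello");	/* Folds to the constant 5.  */
}
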
7065 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7066
7067 static tree
7068 fold_builtin_inf (location_t loc, tree type, int warn)
7069 {
7070 REAL_VALUE_TYPE real;
7071
7072 /* __builtin_inff is intended to be usable to define INFINITY on all
7073 targets. If an infinity is not available, INFINITY expands "to a
7074 positive constant of type float that overflows at translation
7075 time", footnote "In this case, using INFINITY will violate the
7076 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7077 Thus we pedwarn to ensure this constraint violation is
7078 diagnosed. */
7079 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7080 pedwarn (loc, 0, "target format does not support infinity");
7081
7082 real_inf (&real);
7083 return build_real (type, real);
7084 }
7085
7086 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7087
7088 static tree
7089 fold_builtin_nan (tree arg, tree type, int quiet)
7090 {
7091 REAL_VALUE_TYPE real;
7092 const char *str;
7093
7094 if (!validate_arg (arg, POINTER_TYPE))
7095 return NULL_TREE;
7096 str = c_getstr (arg);
7097 if (!str)
7098 return NULL_TREE;
7099
7100 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7101 return NULL_TREE;
7102
7103 return build_real (type, real);
7104 }
7105
7106 /* Return true if the floating point expression T has an integer value.
7107 We also allow +Inf, -Inf and NaN to be considered integer values. */
7108
7109 static bool
7110 integer_valued_real_p (tree t)
7111 {
7112 switch (TREE_CODE (t))
7113 {
7114 case FLOAT_EXPR:
7115 return true;
7116
7117 case ABS_EXPR:
7118 case SAVE_EXPR:
7119 return integer_valued_real_p (TREE_OPERAND (t, 0));
7120
7121 case COMPOUND_EXPR:
7122 case MODIFY_EXPR:
7123 case BIND_EXPR:
7124 return integer_valued_real_p (TREE_OPERAND (t, 1));
7125
7126 case PLUS_EXPR:
7127 case MINUS_EXPR:
7128 case MULT_EXPR:
7129 case MIN_EXPR:
7130 case MAX_EXPR:
7131 return integer_valued_real_p (TREE_OPERAND (t, 0))
7132 && integer_valued_real_p (TREE_OPERAND (t, 1));
7133
7134 case COND_EXPR:
7135 return integer_valued_real_p (TREE_OPERAND (t, 1))
7136 && integer_valued_real_p (TREE_OPERAND (t, 2));
7137
7138 case REAL_CST:
7139 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7140
7141 case NOP_EXPR:
7142 {
7143 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7144 if (TREE_CODE (type) == INTEGER_TYPE)
7145 return true;
7146 if (TREE_CODE (type) == REAL_TYPE)
7147 return integer_valued_real_p (TREE_OPERAND (t, 0));
7148 break;
7149 }
7150
7151 case CALL_EXPR:
7152 switch (builtin_mathfn_code (t))
7153 {
7154 CASE_FLT_FN (BUILT_IN_CEIL):
7155 CASE_FLT_FN (BUILT_IN_FLOOR):
7156 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7157 CASE_FLT_FN (BUILT_IN_RINT):
7158 CASE_FLT_FN (BUILT_IN_ROUND):
7159 CASE_FLT_FN (BUILT_IN_TRUNC):
7160 return true;
7161
7162 CASE_FLT_FN (BUILT_IN_FMIN):
7163 CASE_FLT_FN (BUILT_IN_FMAX):
7164 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7165 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7166
7167 default:
7168 break;
7169 }
7170 break;
7171
7172 default:
7173 break;
7174 }
7175 return false;
7176 }
7177
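/* Illustrative sketch, not part of GCC: (double) n is a FLOAT_EXPR and
   therefore integer-valued by the predicate above, so with
   -fno-math-errno the later fold in fold_trunc_transparent_mathfn
   drops the rounding call entirely.  `ivr_demo' is hypothetical.  */

static double
ivr_demo (int n)
{
  return __builtin_round ((double) n);	/* -> (double) n */
}
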
7178 /* FNDECL is assumed to be a builtin where truncation can be propagated
7179 across (for instance floor((double)f) == (double)floorf (f)).
7180 Do the transformation for a call with argument ARG. */
7181
7182 static tree
7183 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
7184 {
7185 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7186
7187 if (!validate_arg (arg, REAL_TYPE))
7188 return NULL_TREE;
7189
7190 /* Integer rounding functions are idempotent. */
7191 if (fcode == builtin_mathfn_code (arg))
7192 return arg;
7193
7194 /* If argument is already integer valued, and we don't need to worry
7195 about setting errno, there's no need to perform rounding. */
7196 if (! flag_errno_math && integer_valued_real_p (arg))
7197 return arg;
7198
7199 if (optimize)
7200 {
7201 tree arg0 = strip_float_extensions (arg);
7202 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7203 tree newtype = TREE_TYPE (arg0);
7204 tree decl;
7205
7206 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7207 && (decl = mathfn_built_in (newtype, fcode)))
7208 return fold_convert_loc (loc, ftype,
7209 build_call_expr_loc (loc, decl, 1,
7210 fold_convert_loc (loc,
7211 newtype,
7212 arg0)));
7213 }
7214 return NULL_TREE;
7215 }
7216
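/* Illustrative sketch, not part of GCC, of the narrowing above: with
   optimization enabled, floor ((double) f) is rewritten as
   (double) floorf (f).  `floor_narrow_demo' is hypothetical.  */

static double
floor_narrow_demo (float f)
{
  return __builtin_floor ((double) f);	/* -> (double) floorf (f) */
}
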
7217 /* FNDECL is assumed to be a builtin which can narrow the FP type of
7218 the argument, for instance lround((double)f) -> lroundf (f).
7219 Do the transformation for a call with argument ARG. */
7220
7221 static tree
7222 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
7223 {
7224 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7225
7226 if (!validate_arg (arg, REAL_TYPE))
7227 return NULL_TREE;
7228
7229 /* If argument is already integer valued, and we don't need to worry
7230 about setting errno, there's no need to perform rounding. */
7231 if (! flag_errno_math && integer_valued_real_p (arg))
7232 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7233 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7234
7235 if (optimize)
7236 {
7237 tree ftype = TREE_TYPE (arg);
7238 tree arg0 = strip_float_extensions (arg);
7239 tree newtype = TREE_TYPE (arg0);
7240 tree decl;
7241
7242 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7243 && (decl = mathfn_built_in (newtype, fcode)))
7244 return build_call_expr_loc (loc, decl, 1,
7245 fold_convert_loc (loc, newtype, arg0));
7246 }
7247
7248 /* Canonicalize iround (x) to lround (x) on ILP32 targets where
7249 sizeof (int) == sizeof (long). */
7250 if (TYPE_PRECISION (integer_type_node)
7251 == TYPE_PRECISION (long_integer_type_node))
7252 {
7253 tree newfn = NULL_TREE;
7254 switch (fcode)
7255 {
7256 CASE_FLT_FN (BUILT_IN_ICEIL):
7257 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7258 break;
7259
7260 CASE_FLT_FN (BUILT_IN_IFLOOR):
7261 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7262 break;
7263
7264 CASE_FLT_FN (BUILT_IN_IROUND):
7265 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7266 break;
7267
7268 CASE_FLT_FN (BUILT_IN_IRINT):
7269 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7270 break;
7271
7272 default:
7273 break;
7274 }
7275
7276 if (newfn)
7277 {
7278 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7279 return fold_convert_loc (loc,
7280 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7281 }
7282 }
7283
7284 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7285 sizeof (long long) == sizeof (long). */
7286 if (TYPE_PRECISION (long_long_integer_type_node)
7287 == TYPE_PRECISION (long_integer_type_node))
7288 {
7289 tree newfn = NULL_TREE;
7290 switch (fcode)
7291 {
7292 CASE_FLT_FN (BUILT_IN_LLCEIL):
7293 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7294 break;
7295
7296 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7297 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7298 break;
7299
7300 CASE_FLT_FN (BUILT_IN_LLROUND):
7301 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7302 break;
7303
7304 CASE_FLT_FN (BUILT_IN_LLRINT):
7305 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7306 break;
7307
7308 default:
7309 break;
7310 }
7311
7312 if (newfn)
7313 {
7314 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7315 return fold_convert_loc (loc,
7316 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7317 }
7318 }
7319
7320 return NULL_TREE;
7321 }
7322
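/* Illustrative sketch, not part of GCC: on an LP64 target where long
   long and long have the same precision, the call below canonicalizes
   to lround, as described above.  `llround_demo' is hypothetical.  */

static long long
llround_demo (double x)
{
  return __builtin_llround (x);	/* -> (long long) lround (x) */
}
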
7323 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7324 return type. Return NULL_TREE if no simplification can be made. */
7325
7326 static tree
7327 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
7328 {
7329 tree res;
7330
7331 if (!validate_arg (arg, COMPLEX_TYPE)
7332 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7333 return NULL_TREE;
7334
7335 /* Calculate the result when the argument is a constant. */
7336 if (TREE_CODE (arg) == COMPLEX_CST
7337 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7338 type, mpfr_hypot)))
7339 return res;
7340
7341 if (TREE_CODE (arg) == COMPLEX_EXPR)
7342 {
7343 tree real = TREE_OPERAND (arg, 0);
7344 tree imag = TREE_OPERAND (arg, 1);
7345
7346 /* If either part is zero, cabs is fabs of the other. */
7347 if (real_zerop (real))
7348 return fold_build1_loc (loc, ABS_EXPR, type, imag);
7349 if (real_zerop (imag))
7350 return fold_build1_loc (loc, ABS_EXPR, type, real);
7351
7352 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7353 if (flag_unsafe_math_optimizations
7354 && operand_equal_p (real, imag, OEP_PURE_SAME))
7355 {
7356 const REAL_VALUE_TYPE sqrt2_trunc
7357 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7358 STRIP_NOPS (real);
7359 return fold_build2_loc (loc, MULT_EXPR, type,
7360 fold_build1_loc (loc, ABS_EXPR, type, real),
7361 build_real (type, sqrt2_trunc));
7362 }
7363 }
7364
7365 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7366 if (TREE_CODE (arg) == NEGATE_EXPR
7367 || TREE_CODE (arg) == CONJ_EXPR)
7368 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
7369
7370 /* Don't do this when optimizing for size. */
7371 if (flag_unsafe_math_optimizations
7372 && optimize && optimize_function_for_speed_p (cfun))
7373 {
7374 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7375
7376 if (sqrtfn != NULL_TREE)
7377 {
7378 tree rpart, ipart, result;
7379
7380 arg = builtin_save_expr (arg);
7381
7382 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
7383 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
7384
7385 rpart = builtin_save_expr (rpart);
7386 ipart = builtin_save_expr (ipart);
7387
7388 result = fold_build2_loc (loc, PLUS_EXPR, type,
7389 fold_build2_loc (loc, MULT_EXPR, type,
7390 rpart, rpart),
7391 fold_build2_loc (loc, MULT_EXPR, type,
7392 ipart, ipart));
7393
7394 return build_call_expr_loc (loc, sqrtfn, 1, result);
7395 }
7396 }
7397
7398 return NULL_TREE;
7399 }
7400
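/* Illustrative check, not part of GCC, of the equal-parts identity
   used above: for moderate x, cabs (x + xi) == fabs (x) * sqrt (2).
   `cabs_identity_ok' is hypothetical.  */

static int
cabs_identity_ok (double x)
{
  _Complex double z = x + x * 1.0i;	/* GNU imaginary constant.  */
  return __builtin_fabs (__builtin_cabs (z)
			 - __builtin_fabs (x) * __builtin_sqrt (2.0))
	 < 1e-12;
}
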
7401 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7402 complex tree type of the result. If NEG is true, the imaginary
7403 zero is negative. */
7404
7405 static tree
7406 build_complex_cproj (tree type, bool neg)
7407 {
7408 REAL_VALUE_TYPE rinf, rzero = dconst0;
7409
7410 real_inf (&rinf);
7411 rzero.sign = neg;
7412 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7413 build_real (TREE_TYPE (type), rzero));
7414 }
7415
7416 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7417 return type. Return NULL_TREE if no simplification can be made. */
7418
7419 static tree
7420 fold_builtin_cproj (location_t loc, tree arg, tree type)
7421 {
7422 if (!validate_arg (arg, COMPLEX_TYPE)
7423 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7424 return NULL_TREE;
7425
7426 /* If there are no infinities, return arg. */
7427 if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type))))
7428 return non_lvalue_loc (loc, arg);
7429
7430 /* Calculate the result when the argument is a constant. */
7431 if (TREE_CODE (arg) == COMPLEX_CST)
7432 {
7433 const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
7434 const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
7435
7436 if (real_isinf (real) || real_isinf (imag))
7437 return build_complex_cproj (type, imag->sign);
7438 else
7439 return arg;
7440 }
7441 else if (TREE_CODE (arg) == COMPLEX_EXPR)
7442 {
7443 tree real = TREE_OPERAND (arg, 0);
7444 tree imag = TREE_OPERAND (arg, 1);
7445
7446 STRIP_NOPS (real);
7447 STRIP_NOPS (imag);
7448
7449 /* If the real part is inf and the imag part is known to be
7450 nonnegative, return (inf + 0i). Remember side-effects are
7451 possible in the imag part. */
7452 if (TREE_CODE (real) == REAL_CST
7453 && real_isinf (TREE_REAL_CST_PTR (real))
7454 && tree_expr_nonnegative_p (imag))
7455 return omit_one_operand_loc (loc, type,
7456 build_complex_cproj (type, false),
7457 arg);
7458
7459 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
7460 Remember side-effects are possible in the real part. */
7461 if (TREE_CODE (imag) == REAL_CST
7462 && real_isinf (TREE_REAL_CST_PTR (imag)))
7463 return
7464 omit_one_operand_loc (loc, type,
7465 build_complex_cproj (type, TREE_REAL_CST_PTR
7466 (imag)->sign), arg);
7467 }
7468
7469 return NULL_TREE;
7470 }
7471
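/* Illustrative sketch, not part of GCC: cproj is the identity on
   finite arguments, while an infinite part maps onto the (inf +- 0i)
   value built by build_complex_cproj above.  `cproj_demo' is
   hypothetical.  */

static _Complex double
cproj_demo (void)
{
  /* Folds to inf + 0i: the real part is infinite and the imaginary
     part 2.0 is nonnegative.  */
  return __builtin_cproj (__builtin_inf () + 2.0i);
}
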
7472 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7473 Return NULL_TREE if no simplification can be made. */
7474
7475 static tree
7476 fold_builtin_sqrt (location_t loc, tree arg, tree type)
7477 {
7478
7479 enum built_in_function fcode;
7480 tree res;
7481
7482 if (!validate_arg (arg, REAL_TYPE))
7483 return NULL_TREE;
7484
7485 /* Calculate the result when the argument is a constant. */
7486 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7487 return res;
7488
7489 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7490 fcode = builtin_mathfn_code (arg);
7491 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7492 {
7493 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7494 arg = fold_build2_loc (loc, MULT_EXPR, type,
7495 CALL_EXPR_ARG (arg, 0),
7496 build_real (type, dconsthalf));
7497 return build_call_expr_loc (loc, expfn, 1, arg);
7498 }
7499
7500 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7501 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7502 {
7503 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7504
7505 if (powfn)
7506 {
7507 tree arg0 = CALL_EXPR_ARG (arg, 0);
7508 tree tree_root;
7509 /* The inner root was either sqrt or cbrt. */
7510 /* This was a conditional expression but it triggered a bug
7511 in Sun C 5.5. */
7512 REAL_VALUE_TYPE dconstroot;
7513 if (BUILTIN_SQRT_P (fcode))
7514 dconstroot = dconsthalf;
7515 else
7516 dconstroot = dconst_third ();
7517
7518 /* Adjust for the outer root. */
7519 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7520 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7521 tree_root = build_real (type, dconstroot);
7522 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7523 }
7524 }
7525
7526 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7527 if (flag_unsafe_math_optimizations
7528 && (fcode == BUILT_IN_POW
7529 || fcode == BUILT_IN_POWF
7530 || fcode == BUILT_IN_POWL))
7531 {
7532 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7533 tree arg0 = CALL_EXPR_ARG (arg, 0);
7534 tree arg1 = CALL_EXPR_ARG (arg, 1);
7535 tree narg1;
7536 if (!tree_expr_nonnegative_p (arg0))
7537 arg0 = build1 (ABS_EXPR, type, arg0);
7538 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7539 build_real (type, dconsthalf));
7540 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7541 }
7542
7543 return NULL_TREE;
7544 }
7545
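/* Illustrative sketch, not part of GCC: with
   -funsafe-math-optimizations the folds above rewrite the calls below
   as exp (x * 0.5) and pow (fabs (x), y * 0.5).  `sqrt_demo' is
   hypothetical.  */

static double
sqrt_demo (double x, double y)
{
  return __builtin_sqrt (__builtin_exp (x))
	 + __builtin_sqrt (__builtin_pow (x, y));
}
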
7546 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7547 Return NULL_TREE if no simplification can be made. */
7548
7549 static tree
7550 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7551 {
7552 const enum built_in_function fcode = builtin_mathfn_code (arg);
7553 tree res;
7554
7555 if (!validate_arg (arg, REAL_TYPE))
7556 return NULL_TREE;
7557
7558 /* Calculate the result when the argument is a constant. */
7559 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7560 return res;
7561
7562 if (flag_unsafe_math_optimizations)
7563 {
7564 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7565 if (BUILTIN_EXPONENT_P (fcode))
7566 {
7567 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7568 const REAL_VALUE_TYPE third_trunc =
7569 real_value_truncate (TYPE_MODE (type), dconst_third ());
7570 arg = fold_build2_loc (loc, MULT_EXPR, type,
7571 CALL_EXPR_ARG (arg, 0),
7572 build_real (type, third_trunc));
7573 return build_call_expr_loc (loc, expfn, 1, arg);
7574 }
7575
7576 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7577 if (BUILTIN_SQRT_P (fcode))
7578 {
7579 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7580
7581 if (powfn)
7582 {
7583 tree arg0 = CALL_EXPR_ARG (arg, 0);
7584 tree tree_root;
7585 REAL_VALUE_TYPE dconstroot = dconst_third ();
7586
7587 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7588 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7589 tree_root = build_real (type, dconstroot);
7590 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7591 }
7592 }
7593
7594 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7595 if (BUILTIN_CBRT_P (fcode))
7596 {
7597 tree arg0 = CALL_EXPR_ARG (arg, 0);
7598 if (tree_expr_nonnegative_p (arg0))
7599 {
7600 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7601
7602 if (powfn)
7603 {
7604 tree tree_root;
7605 REAL_VALUE_TYPE dconstroot;
7606
7607 real_arithmetic (&dconstroot, MULT_EXPR,
7608 dconst_third_ptr (), dconst_third_ptr ());
7609 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7610 tree_root = build_real (type, dconstroot);
7611 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7612 }
7613 }
7614 }
7615
7616 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7617 if (fcode == BUILT_IN_POW
7618 || fcode == BUILT_IN_POWF
7619 || fcode == BUILT_IN_POWL)
7620 {
7621 tree arg00 = CALL_EXPR_ARG (arg, 0);
7622 tree arg01 = CALL_EXPR_ARG (arg, 1);
7623 if (tree_expr_nonnegative_p (arg00))
7624 {
7625 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7626 const REAL_VALUE_TYPE dconstroot
7627 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7628 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7629 build_real (type, dconstroot));
7630 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7631 }
7632 }
7633 }
7634 return NULL_TREE;
7635 }
7636
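/* Illustrative sketch, not part of GCC: with
   -funsafe-math-optimizations the fold above turns cbrt (sqrt (x))
   into pow (x, 1/6).  `cbrt_demo' is hypothetical.  */

static double
cbrt_demo (double x)
{
  return __builtin_cbrt (__builtin_sqrt (x));	/* -> pow (x, 1.0/6) */
}
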
7637 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7638 TYPE is the type of the return value. Return NULL_TREE if no
7639 simplification can be made. */
7640
7641 static tree
7642 fold_builtin_cos (location_t loc,
7643 tree arg, tree type, tree fndecl)
7644 {
7645 tree res, narg;
7646
7647 if (!validate_arg (arg, REAL_TYPE))
7648 return NULL_TREE;
7649
7650 /* Calculate the result when the argument is a constant. */
7651 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7652 return res;
7653
7654 /* Optimize cos(-x) into cos (x). */
7655 if ((narg = fold_strip_sign_ops (arg)))
7656 return build_call_expr_loc (loc, fndecl, 1, narg);
7657
7658 return NULL_TREE;
7659 }
7660
7661 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7662 Return NULL_TREE if no simplification can be made. */
7663
7664 static tree
7665 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7666 {
7667 if (validate_arg (arg, REAL_TYPE))
7668 {
7669 tree res, narg;
7670
7671 /* Calculate the result when the argument is a constant. */
7672 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7673 return res;
7674
7675 /* Optimize cosh(-x) into cosh (x). */
7676 if ((narg = fold_strip_sign_ops (arg)))
7677 return build_call_expr_loc (loc, fndecl, 1, narg);
7678 }
7679
7680 return NULL_TREE;
7681 }
7682
7683 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7684 argument ARG. TYPE is the type of the return value. Return
7685 NULL_TREE if no simplification can be made. */
7686
7687 static tree
7688 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7689 bool hyper)
7690 {
7691 if (validate_arg (arg, COMPLEX_TYPE)
7692 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7693 {
7694 tree tmp;
7695
7696 /* Calculate the result when the argument is a constant. */
7697 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7698 return tmp;
7699
7700 /* Optimize fn(-x) into fn(x). */
7701 if ((tmp = fold_strip_sign_ops (arg)))
7702 return build_call_expr_loc (loc, fndecl, 1, tmp);
7703 }
7704
7705 return NULL_TREE;
7706 }
7707
7708 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7709 Return NULL_TREE if no simplification can be made. */
7710
7711 static tree
7712 fold_builtin_tan (tree arg, tree type)
7713 {
7714 enum built_in_function fcode;
7715 tree res;
7716
7717 if (!validate_arg (arg, REAL_TYPE))
7718 return NULL_TREE;
7719
7720 /* Calculate the result when the argument is a constant. */
7721 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7722 return res;
7723
7724 /* Optimize tan(atan(x)) = x. */
7725 fcode = builtin_mathfn_code (arg);
7726 if (flag_unsafe_math_optimizations
7727 && (fcode == BUILT_IN_ATAN
7728 || fcode == BUILT_IN_ATANF
7729 || fcode == BUILT_IN_ATANL))
7730 return CALL_EXPR_ARG (arg, 0);
7731
7732 return NULL_TREE;
7733 }
7734
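/* Illustrative sketch, not part of GCC: with
   -funsafe-math-optimizations the fold above collapses tan (atan (x))
   to x.  `tan_demo' is hypothetical.  */

static double
tan_demo (double x)
{
  return __builtin_tan (__builtin_atan (x));	/* -> x */
}
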
7735 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7736 NULL_TREE if no simplification can be made. */
7737
7738 static tree
7739 fold_builtin_sincos (location_t loc,
7740 tree arg0, tree arg1, tree arg2)
7741 {
7742 tree type;
7743 tree res, fn, call;
7744
7745 if (!validate_arg (arg0, REAL_TYPE)
7746 || !validate_arg (arg1, POINTER_TYPE)
7747 || !validate_arg (arg2, POINTER_TYPE))
7748 return NULL_TREE;
7749
7750 type = TREE_TYPE (arg0);
7751
7752 /* Calculate the result when the argument is a constant. */
7753 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7754 return res;
7755
7756 /* Canonicalize sincos to cexpi. */
7757 if (!targetm.libc_has_function (function_c99_math_complex))
7758 return NULL_TREE;
7759 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7760 if (!fn)
7761 return NULL_TREE;
7762
7763 call = build_call_expr_loc (loc, fn, 1, arg0);
7764 call = builtin_save_expr (call);
7765
7766 return build2 (COMPOUND_EXPR, void_type_node,
7767 build2 (MODIFY_EXPR, void_type_node,
7768 build_fold_indirect_ref_loc (loc, arg1),
7769 build1 (IMAGPART_EXPR, type, call)),
7770 build2 (MODIFY_EXPR, void_type_node,
7771 build_fold_indirect_ref_loc (loc, arg2),
7772 build1 (REALPART_EXPR, type, call)));
7773 }
7774
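/* Illustrative sketch, not part of GCC, of the canonicalization above:
   one sincos call becomes a single cexpi call whose imaginary and real
   parts are the sine and cosine.  At the source level cexpi (x)
   behaves like cexp (I*x); `sincos_demo' is hypothetical.  */

static void
sincos_demo (double x, double *s, double *c)
{
  _Complex double e = __builtin_cexp (1.0i * x);	/* cexpi (x) */
  *s = __builtin_cimag (e);
  *c = __builtin_creal (e);
}
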
7775 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7776 NULL_TREE if no simplification can be made. */
7777
7778 static tree
7779 fold_builtin_cexp (location_t loc, tree arg0, tree type)
7780 {
7781 tree rtype;
7782 tree realp, imagp, ifn;
7783 tree res;
7784
7785 if (!validate_arg (arg0, COMPLEX_TYPE)
7786 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
7787 return NULL_TREE;
7788
7789 /* Calculate the result when the argument is a constant. */
7790 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
7791 return res;
7792
7793 rtype = TREE_TYPE (TREE_TYPE (arg0));
7794
7795 /* If we can figure out the real part of arg0 and it is constant zero,
7796 fold to cexpi.  */
7797 if (!targetm.libc_has_function (function_c99_math_complex))
7798 return NULL_TREE;
7799 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7800 if (!ifn)
7801 return NULL_TREE;
7802
7803 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
7804 && real_zerop (realp))
7805 {
7806 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
7807 return build_call_expr_loc (loc, ifn, 1, narg);
7808 }
7809
7810 /* If we can easily decompose the real and imaginary parts, split cexp
7811 into exp (r) * cexpi (i).  */
7812 if (flag_unsafe_math_optimizations
7813 && realp)
7814 {
7815 tree rfn, rcall, icall;
7816
7817 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7818 if (!rfn)
7819 return NULL_TREE;
7820
7821 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
7822 if (!imagp)
7823 return NULL_TREE;
7824
7825 icall = build_call_expr_loc (loc, ifn, 1, imagp);
7826 icall = builtin_save_expr (icall);
7827 rcall = build_call_expr_loc (loc, rfn, 1, realp);
7828 rcall = builtin_save_expr (rcall);
7829 return fold_build2_loc (loc, COMPLEX_EXPR, type,
7830 fold_build2_loc (loc, MULT_EXPR, rtype,
7831 rcall,
7832 fold_build1_loc (loc, REALPART_EXPR,
7833 rtype, icall)),
7834 fold_build2_loc (loc, MULT_EXPR, rtype,
7835 rcall,
7836 fold_build1_loc (loc, IMAGPART_EXPR,
7837 rtype, icall)));
7838 }
7839
7840 return NULL_TREE;
7841 }
7842
7843 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7844 Return NULL_TREE if no simplification can be made. */
7845
7846 static tree
7847 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7848 {
7849 if (!validate_arg (arg, REAL_TYPE))
7850 return NULL_TREE;
7851
7852 /* Optimize trunc of constant value. */
7853 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7854 {
7855 REAL_VALUE_TYPE r, x;
7856 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7857
7858 x = TREE_REAL_CST (arg);
7859 real_trunc (&r, TYPE_MODE (type), &x);
7860 return build_real (type, r);
7861 }
7862
7863 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7864 }
7865
7866 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7867 Return NULL_TREE if no simplification can be made. */
7868
7869 static tree
7870 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7871 {
7872 if (!validate_arg (arg, REAL_TYPE))
7873 return NULL_TREE;
7874
7875 /* Optimize floor of constant value. */
7876 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7877 {
7878 REAL_VALUE_TYPE x;
7879
7880 x = TREE_REAL_CST (arg);
7881 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7882 {
7883 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7884 REAL_VALUE_TYPE r;
7885
7886 real_floor (&r, TYPE_MODE (type), &x);
7887 return build_real (type, r);
7888 }
7889 }
7890
7891 /* Fold floor (x) where x is nonnegative to trunc (x). */
7892 if (tree_expr_nonnegative_p (arg))
7893 {
7894 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7895 if (truncfn)
7896 return build_call_expr_loc (loc, truncfn, 1, arg);
7897 }
7898
7899 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7900 }
7901
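/* Illustrative sketch, not part of GCC: a provably nonnegative
   argument lets the fold above use trunc, which is typically cheaper
   than floor.  `floor_nonneg_demo' is hypothetical.  */

static double
floor_nonneg_demo (double x)
{
  return __builtin_floor (__builtin_fabs (x));	/* -> trunc (fabs (x)) */
}
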
7902 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7903 Return NULL_TREE if no simplification can be made. */
7904
7905 static tree
7906 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7907 {
7908 if (!validate_arg (arg, REAL_TYPE))
7909 return NULL_TREE;
7910
7911 /* Optimize ceil of constant value. */
7912 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7913 {
7914 REAL_VALUE_TYPE x;
7915
7916 x = TREE_REAL_CST (arg);
7917 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7918 {
7919 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7920 REAL_VALUE_TYPE r;
7921
7922 real_ceil (&r, TYPE_MODE (type), &x);
7923 return build_real (type, r);
7924 }
7925 }
7926
7927 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7928 }
7929
7930 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7931 Return NULL_TREE if no simplification can be made. */
7932
7933 static tree
7934 fold_builtin_round (location_t loc, tree fndecl, tree arg)
7935 {
7936 if (!validate_arg (arg, REAL_TYPE))
7937 return NULL_TREE;
7938
7939 /* Optimize round of constant value. */
7940 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7941 {
7942 REAL_VALUE_TYPE x;
7943
7944 x = TREE_REAL_CST (arg);
7945 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7946 {
7947 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7948 REAL_VALUE_TYPE r;
7949
7950 real_round (&r, TYPE_MODE (type), &x);
7951 return build_real (type, r);
7952 }
7953 }
7954
7955 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7956 }
7957
7958 /* Fold function call to builtin lround, lroundf or lroundl (or the
7959 corresponding long long versions) and other rounding functions. ARG
7960 is the argument to the call. Return NULL_TREE if no simplification
7961 can be made. */
7962
7963 static tree
7964 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
7965 {
7966 if (!validate_arg (arg, REAL_TYPE))
7967 return NULL_TREE;
7968
7969 /* Optimize lround of constant value. */
7970 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7971 {
7972 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
7973
7974 if (real_isfinite (&x))
7975 {
7976 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
7977 tree ftype = TREE_TYPE (arg);
7978 REAL_VALUE_TYPE r;
7979 bool fail = false;
7980
7981 switch (DECL_FUNCTION_CODE (fndecl))
7982 {
7983 CASE_FLT_FN (BUILT_IN_IFLOOR):
7984 CASE_FLT_FN (BUILT_IN_LFLOOR):
7985 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7986 real_floor (&r, TYPE_MODE (ftype), &x);
7987 break;
7988
7989 CASE_FLT_FN (BUILT_IN_ICEIL):
7990 CASE_FLT_FN (BUILT_IN_LCEIL):
7991 CASE_FLT_FN (BUILT_IN_LLCEIL):
7992 real_ceil (&r, TYPE_MODE (ftype), &x);
7993 break;
7994
7995 CASE_FLT_FN (BUILT_IN_IROUND):
7996 CASE_FLT_FN (BUILT_IN_LROUND):
7997 CASE_FLT_FN (BUILT_IN_LLROUND):
7998 real_round (&r, TYPE_MODE (ftype), &x);
7999 break;
8000
8001 default:
8002 gcc_unreachable ();
8003 }
8004
8005 wide_int val = real_to_integer (&r, &fail, TYPE_PRECISION (itype));
8006 if (!fail)
8007 return wide_int_to_tree (itype, val);
8008 }
8009 }
8010
8011 switch (DECL_FUNCTION_CODE (fndecl))
8012 {
8013 CASE_FLT_FN (BUILT_IN_LFLOOR):
8014 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8015 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8016 if (tree_expr_nonnegative_p (arg))
8017 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
8018 TREE_TYPE (TREE_TYPE (fndecl)), arg);
8019 break;
8020 default:;
8021 }
8022
8023 return fold_fixed_mathfn (loc, fndecl, arg);
8024 }
8025
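/* Illustrative sketch, not part of GCC: a constant argument is rounded
   and converted through real_to_integer above; real_round rounds
   halfway cases away from zero.  `lround_demo' is hypothetical.  */

static long
lround_demo (void)
{
  return __builtin_lround (2.5);	/* folds to 3 */
}
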
8026 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8027 and their long and long long variants (e.g. ffsl and ffsll).  ARG is
8028 the argument to the call. Return NULL_TREE if no simplification can
8029 be made. */
8030
8031 static tree
8032 fold_builtin_bitop (tree fndecl, tree arg)
8033 {
8034 if (!validate_arg (arg, INTEGER_TYPE))
8035 return NULL_TREE;
8036
8037 /* Optimize for constant argument. */
8038 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8039 {
8040 tree type = TREE_TYPE (arg);
8041 int result;
8042
8043 switch (DECL_FUNCTION_CODE (fndecl))
8044 {
8045 CASE_INT_FN (BUILT_IN_FFS):
8046 result = wi::ffs (arg);
8047 break;
8048
8049 CASE_INT_FN (BUILT_IN_CLZ):
8050 if (wi::ne_p (arg, 0))
8051 result = wi::clz (arg);
8052 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8053 result = TYPE_PRECISION (type);
8054 break;
8055
8056 CASE_INT_FN (BUILT_IN_CTZ):
8057 if (wi::ne_p (arg, 0))
8058 result = wi::ctz (arg);
8059 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8060 result = TYPE_PRECISION (type);
8061 break;
8062
8063 CASE_INT_FN (BUILT_IN_CLRSB):
8064 result = wi::clrsb (arg);
8065 break;
8066
8067 CASE_INT_FN (BUILT_IN_POPCOUNT):
8068 result = wi::popcount (arg);
8069 break;
8070
8071 CASE_INT_FN (BUILT_IN_PARITY):
8072 result = wi::parity (arg);
8073 break;
8074
8075 default:
8076 gcc_unreachable ();
8077 }
8078
8079 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8080 }
8081
8082 return NULL_TREE;
8083 }
8084
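/* Illustrative sketch, not part of GCC: constant operands fold at
   compile time through the wide-int helpers above.  `bitop_demo' is
   hypothetical.  */

static int
bitop_demo (void)
{
  return __builtin_popcount (0xff)	/* folds to 8 */
	 + __builtin_ctz (16)		/* folds to 4 */
	 + __builtin_ffs (0);		/* folds to 0 */
}
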
8085 /* Fold function call to builtin_bswap and the short, long and long long
8086 variants. Return NULL_TREE if no simplification can be made. */
8087 static tree
8088 fold_builtin_bswap (tree fndecl, tree arg)
8089 {
8090 if (! validate_arg (arg, INTEGER_TYPE))
8091 return NULL_TREE;
8092
8093 /* Optimize constant value. */
8094 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8095 {
8096 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8097
8098 switch (DECL_FUNCTION_CODE (fndecl))
8099 {
8100 case BUILT_IN_BSWAP16:
8101 case BUILT_IN_BSWAP32:
8102 case BUILT_IN_BSWAP64:
8103 {
8104 signop sgn = TYPE_SIGN (type);
8105 tree result =
8106 wide_int_to_tree (type,
8107 wide_int::from (arg, TYPE_PRECISION (type),
8108 sgn).bswap ());
8109 return result;
8110 }
8111 default:
8112 gcc_unreachable ();
8113 }
8114 }
8115
8116 return NULL_TREE;
8117 }
8118
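/* Illustrative sketch, not part of GCC: a constant operand is
   byte-swapped at compile time by the wide_int bswap above.
   `bswap_demo' is hypothetical.  */

static unsigned int
bswap_demo (void)
{
  return __builtin_bswap32 (0x12345678);	/* folds to 0x78563412 */
}
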
8119 /* A subroutine of fold_builtin to fold the various logarithmic
8120 functions.  Return NULL_TREE if no simplification can be made.
8121 FUNC is the corresponding MPFR logarithm function. */
8122
8123 static tree
8124 fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
8125 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8126 {
8127 if (validate_arg (arg, REAL_TYPE))
8128 {
8129 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8130 tree res;
8131 const enum built_in_function fcode = builtin_mathfn_code (arg);
8132
8133 /* Calculate the result when the argument is a constant. */
8134 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
8135 return res;
8136
8137 /* Special case, optimize logN(expN(x)) = x. */
8138 if (flag_unsafe_math_optimizations
8139 && ((func == mpfr_log
8140 && (fcode == BUILT_IN_EXP
8141 || fcode == BUILT_IN_EXPF
8142 || fcode == BUILT_IN_EXPL))
8143 || (func == mpfr_log2
8144 && (fcode == BUILT_IN_EXP2
8145 || fcode == BUILT_IN_EXP2F
8146 || fcode == BUILT_IN_EXP2L))
8147 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
8148 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8149
8150 /* Optimize logN(func()) for various exponential functions. We
8151 want to determine the value "x" and the power "exponent" in
8152 order to transform logN(x**exponent) into exponent*logN(x). */
8153 if (flag_unsafe_math_optimizations)
8154 {
8155 tree exponent = 0, x = 0;
8156
8157 switch (fcode)
8158 {
8159 CASE_FLT_FN (BUILT_IN_EXP):
8160 /* Prepare to do logN(exp(exponent)) -> exponent*logN(e).  */
8161 x = build_real (type, real_value_truncate (TYPE_MODE (type),
8162 dconst_e ()));
8163 exponent = CALL_EXPR_ARG (arg, 0);
8164 break;
8165 CASE_FLT_FN (BUILT_IN_EXP2):
8166 /* Prepare to do logN(exp2(exponent)) -> exponent*logN(2).  */
8167 x = build_real (type, dconst2);
8168 exponent = CALL_EXPR_ARG (arg, 0);
8169 break;
8170 CASE_FLT_FN (BUILT_IN_EXP10):
8171 CASE_FLT_FN (BUILT_IN_POW10):
8172 /* Prepare to do logN(exp10(exponent)) -> exponent*logN(10).  */
8173 {
8174 REAL_VALUE_TYPE dconst10;
8175 real_from_integer (&dconst10, VOIDmode, 10, SIGNED);
8176 x = build_real (type, dconst10);
8177 }
8178 exponent = CALL_EXPR_ARG (arg, 0);
8179 break;
8180 CASE_FLT_FN (BUILT_IN_SQRT):
8181 /* Prepare to do logN(sqrt(x)) -> 0.5*logN(x).  */
8182 x = CALL_EXPR_ARG (arg, 0);
8183 exponent = build_real (type, dconsthalf);
8184 break;
8185 CASE_FLT_FN (BUILT_IN_CBRT):
8186 /* Prepare to do logN(cbrt(x)) -> (1/3)*logN(x).  */
8187 x = CALL_EXPR_ARG (arg, 0);
8188 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
8189 dconst_third ()));
8190 break;
8191 CASE_FLT_FN (BUILT_IN_POW):
8192 /* Prepare to do logN(pow(x,exponent)) -> exponent*logN(x).  */
8193 x = CALL_EXPR_ARG (arg, 0);
8194 exponent = CALL_EXPR_ARG (arg, 1);
8195 break;
8196 default:
8197 break;
8198 }
8199
8200 /* Now perform the optimization. */
8201 if (x && exponent)
8202 {
8203 tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
8204 return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
8205 }
8206 }
8207 }
8208
8209 return NULL_TREE;
8210 }
8211
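/* Illustrative sketch, not part of GCC: with
   -funsafe-math-optimizations the folds above give log (exp (x)) -> x
   and log (pow (x, y)) -> y * log (x).  `log_demo' is hypothetical.  */

static double
log_demo (double x, double y)
{
  return __builtin_log (__builtin_pow (x, y));	/* -> y * log (x) */
}
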
8212 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8213 NULL_TREE if no simplification can be made. */
8214
8215 static tree
8216 fold_builtin_hypot (location_t loc, tree fndecl,
8217 tree arg0, tree arg1, tree type)
8218 {
8219 tree res, narg0, narg1;
8220
8221 if (!validate_arg (arg0, REAL_TYPE)
8222 || !validate_arg (arg1, REAL_TYPE))
8223 return NULL_TREE;
8224
8225 /* Calculate the result when the argument is a constant. */
8226 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8227 return res;
8228
8229 /* If either argument to hypot has a negate or abs, strip that off.
8230 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8231 narg0 = fold_strip_sign_ops (arg0);
8232 narg1 = fold_strip_sign_ops (arg1);
8233 if (narg0 || narg1)
8234 {
8235 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
8236 narg1 ? narg1 : arg1);
8237 }
8238
8239 /* If either argument is zero, hypot is fabs of the other. */
8240 if (real_zerop (arg0))
8241 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
8242 else if (real_zerop (arg1))
8243 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
8244
8245 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8246 if (flag_unsafe_math_optimizations
8247 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8248 {
8249 const REAL_VALUE_TYPE sqrt2_trunc
8250 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8251 return fold_build2_loc (loc, MULT_EXPR, type,
8252 fold_build1_loc (loc, ABS_EXPR, type, arg0),
8253 build_real (type, sqrt2_trunc));
8254 }
8255
8256 return NULL_TREE;
8257 }
8258
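/* Illustrative sketch, not part of GCC, of the sign-stripping fold
   above; `hypot_demo' is hypothetical.  */

static double
hypot_demo (double x, double y)
{
  /* Folds to hypot (x, y): negation and fabs cannot change the
     result.  */
  return __builtin_hypot (-x, __builtin_fabs (y));
}
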
8259
8260 /* Fold a builtin function call to pow, powf, or powl. Return
8261 NULL_TREE if no simplification can be made. */
8262 static tree
8263 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
8264 {
8265 tree res;
8266
8267 if (!validate_arg (arg0, REAL_TYPE)
8268 || !validate_arg (arg1, REAL_TYPE))
8269 return NULL_TREE;
8270
8271 /* Calculate the result when the argument is a constant. */
8272 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8273 return res;
8274
8275 /* Optimize pow(1.0,y) = 1.0. */
8276 if (real_onep (arg0))
8277 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8278
8279 if (TREE_CODE (arg1) == REAL_CST
8280 && !TREE_OVERFLOW (arg1))
8281 {
8282 REAL_VALUE_TYPE cint;
8283 REAL_VALUE_TYPE c;
8284 HOST_WIDE_INT n;
8285
8286 c = TREE_REAL_CST (arg1);
8287
8288 /* Optimize pow(x,0.0) = 1.0. */
8289 if (REAL_VALUES_EQUAL (c, dconst0))
8290 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8291 arg0);
8292
8293 /* Optimize pow(x,1.0) = x. */
8294 if (REAL_VALUES_EQUAL (c, dconst1))
8295 return arg0;
8296
8297 /* Optimize pow(x,-1.0) = 1.0/x. */
8298 if (REAL_VALUES_EQUAL (c, dconstm1))
8299 return fold_build2_loc (loc, RDIV_EXPR, type,
8300 build_real (type, dconst1), arg0);
8301
8302 /* Optimize pow(x,0.5) = sqrt(x). */
8303 if (flag_unsafe_math_optimizations
8304 && REAL_VALUES_EQUAL (c, dconsthalf))
8305 {
8306 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8307
8308 if (sqrtfn != NULL_TREE)
8309 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
8310 }
8311
8312 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8313 if (flag_unsafe_math_optimizations)
8314 {
8315 const REAL_VALUE_TYPE dconstroot
8316 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8317
8318 if (REAL_VALUES_EQUAL (c, dconstroot))
8319 {
8320 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8321 if (cbrtfn != NULL_TREE)
8322 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
8323 }
8324 }
8325
8326 /* Check for an integer exponent. */
8327 n = real_to_integer (&c);
8328 real_from_integer (&cint, VOIDmode, n, SIGNED);
8329 if (real_identical (&c, &cint))
8330 {
8331 /* Attempt to evaluate pow at compile-time, unless this should
8332 raise an exception. */
8333 if (TREE_CODE (arg0) == REAL_CST
8334 && !TREE_OVERFLOW (arg0)
8335 && (n > 0
8336 || (!flag_trapping_math && !flag_errno_math)
8337 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
8338 {
8339 REAL_VALUE_TYPE x;
8340 bool inexact;
8341
8342 x = TREE_REAL_CST (arg0);
8343 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8344 if (flag_unsafe_math_optimizations || !inexact)
8345 return build_real (type, x);
8346 }
8347
8348 /* Strip sign ops from even integer powers. */
8349 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8350 {
8351 tree narg0 = fold_strip_sign_ops (arg0);
8352 if (narg0)
8353 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
8354 }
8355 }
8356 }
8357
8358 if (flag_unsafe_math_optimizations)
8359 {
8360 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8361
8362 /* Optimize pow(expN(x),y) = expN(x*y). */
8363 if (BUILTIN_EXPONENT_P (fcode))
8364 {
8365 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8366 tree arg = CALL_EXPR_ARG (arg0, 0);
8367 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
8368 return build_call_expr_loc (loc, expfn, 1, arg);
8369 }
8370
8371 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8372 if (BUILTIN_SQRT_P (fcode))
8373 {
8374 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8375 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8376 build_real (type, dconsthalf));
8377 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
8378 }
8379
8380 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8381 if (BUILTIN_CBRT_P (fcode))
8382 {
8383 tree arg = CALL_EXPR_ARG (arg0, 0);
8384 if (tree_expr_nonnegative_p (arg))
8385 {
8386 const REAL_VALUE_TYPE dconstroot
8387 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8388 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8389 build_real (type, dconstroot));
8390 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
8391 }
8392 }
8393
8394 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8395 if (fcode == BUILT_IN_POW
8396 || fcode == BUILT_IN_POWF
8397 || fcode == BUILT_IN_POWL)
8398 {
8399 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8400 if (tree_expr_nonnegative_p (arg00))
8401 {
8402 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8403 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
8404 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
8405 }
8406 }
8407 }
8408
8409 return NULL_TREE;
8410 }
8411
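/* Illustrative sketch, not part of GCC, of the constant-exponent folds
   above; `pow_demo' is hypothetical.  */

static double
pow_demo (double x)
{
  return __builtin_pow (x, 1.0)		/* -> x */
	 + __builtin_pow (x, -1.0)	/* -> 1.0 / x */
	 + __builtin_pow (x, 0.0);	/* -> 1.0 */
}
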
8412 /* Fold a builtin function call to powi, powif, or powil with arguments ARG0 and ARG1.
8413 Return NULL_TREE if no simplification can be made. */
8414 static tree
8415 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8416 tree arg0, tree arg1, tree type)
8417 {
8418 if (!validate_arg (arg0, REAL_TYPE)
8419 || !validate_arg (arg1, INTEGER_TYPE))
8420 return NULL_TREE;
8421
8422 /* Optimize powi(1.0,y) = 1.0.  */
8423 if (real_onep (arg0))
8424 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8425
8426 if (tree_fits_shwi_p (arg1))
8427 {
8428 HOST_WIDE_INT c = tree_to_shwi (arg1);
8429
8430 /* Evaluate powi at compile-time. */
8431 if (TREE_CODE (arg0) == REAL_CST
8432 && !TREE_OVERFLOW (arg0))
8433 {
8434 REAL_VALUE_TYPE x;
8435 x = TREE_REAL_CST (arg0);
8436 real_powi (&x, TYPE_MODE (type), &x, c);
8437 return build_real (type, x);
8438 }
8439
8440 /* Optimize powi(x,0) = 1.0.  */
8441 if (c == 0)
8442 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8443 arg0);
8444
8445 /* Optimize powi(x,1) = x.  */
8446 if (c == 1)
8447 return arg0;
8448
8449 /* Optimize powi(x,-1) = 1.0/x.  */
8450 if (c == -1)
8451 return fold_build2_loc (loc, RDIV_EXPR, type,
8452 build_real (type, dconst1), arg0);
8453 }
8454
8455 return NULL_TREE;
8456 }
8457
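/* Illustrative sketch, not part of GCC: __builtin_powi with a small
   constant exponent folds as above.  `powi_demo' is hypothetical.  */

static double
powi_demo (double x)
{
  return __builtin_powi (x, -1);	/* -> 1.0 / x */
}
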
8458 /* A subroutine of fold_builtin to fold the various exponent
8459 functions. Return NULL_TREE if no simplification can be made.
8460 FUNC is the corresponding MPFR exponent function. */
8461
8462 static tree
8463 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8464 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8465 {
8466 if (validate_arg (arg, REAL_TYPE))
8467 {
8468 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8469 tree res;
8470
8471 /* Calculate the result when the argument is a constant. */
8472 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8473 return res;
8474
8475 /* Optimize expN(logN(x)) = x. */
8476 if (flag_unsafe_math_optimizations)
8477 {
8478 const enum built_in_function fcode = builtin_mathfn_code (arg);
8479
8480 if ((func == mpfr_exp
8481 && (fcode == BUILT_IN_LOG
8482 || fcode == BUILT_IN_LOGF
8483 || fcode == BUILT_IN_LOGL))
8484 || (func == mpfr_exp2
8485 && (fcode == BUILT_IN_LOG2
8486 || fcode == BUILT_IN_LOG2F
8487 || fcode == BUILT_IN_LOG2L))
8488 || (func == mpfr_exp10
8489 && (fcode == BUILT_IN_LOG10
8490 || fcode == BUILT_IN_LOG10F
8491 || fcode == BUILT_IN_LOG10L)))
8492 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8493 }
8494 }
8495
8496 return NULL_TREE;
8497 }
8498
8499 /* Return true if VAR is a VAR_DECL or a component thereof. */
8500
8501 static bool
8502 var_decl_component_p (tree var)
8503 {
8504 tree inner = var;
8505 while (handled_component_p (inner))
8506 inner = TREE_OPERAND (inner, 0);
8507 return SSA_VAR_P (inner);
8508 }
8509
8510 /* Fold function call to builtin memset. Return
8511 NULL_TREE if no simplification can be made. */
8512
8513 static tree
8514 fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
8515 tree type, bool ignore)
8516 {
8517 tree var, ret, etype;
8518 unsigned HOST_WIDE_INT length, cval;
8519
8520 if (! validate_arg (dest, POINTER_TYPE)
8521 || ! validate_arg (c, INTEGER_TYPE)
8522 || ! validate_arg (len, INTEGER_TYPE))
8523 return NULL_TREE;
8524
8525 if (! tree_fits_uhwi_p (len))
8526 return NULL_TREE;
8527
8528 /* If the LEN parameter is zero, return DEST. */
8529 if (integer_zerop (len))
8530 return omit_one_operand_loc (loc, type, dest, c);
8531
8532 if (TREE_CODE (c) != INTEGER_CST || TREE_SIDE_EFFECTS (dest))
8533 return NULL_TREE;
8534
8535 var = dest;
8536 STRIP_NOPS (var);
8537 if (TREE_CODE (var) != ADDR_EXPR)
8538 return NULL_TREE;
8539
8540 var = TREE_OPERAND (var, 0);
8541 if (TREE_THIS_VOLATILE (var))
8542 return NULL_TREE;
8543
8544 etype = TREE_TYPE (var);
8545 if (TREE_CODE (etype) == ARRAY_TYPE)
8546 etype = TREE_TYPE (etype);
8547
8548 if (!INTEGRAL_TYPE_P (etype)
8549 && !POINTER_TYPE_P (etype))
8550 return NULL_TREE;
8551
8552 if (! var_decl_component_p (var))
8553 return NULL_TREE;
8554
8555 length = tree_to_uhwi (len);
8556 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
8557 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
8558 return NULL_TREE;
8559
8560 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8561 return NULL_TREE;
8562
8563 if (integer_zerop (c))
8564 cval = 0;
8565 else
8566 {
8567 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8568 return NULL_TREE;
8569
8570 cval = TREE_INT_CST_LOW (c);
8571 cval &= 0xff;
8572 cval |= cval << 8;
8573 cval |= cval << 16;
8574 cval |= (cval << 31) << 1;
8575 }
8576
8577 ret = build_int_cst_type (etype, cval);
8578 var = build_fold_indirect_ref_loc (loc,
8579 fold_convert_loc (loc,
8580 build_pointer_type (etype),
8581 dest));
8582 ret = build2 (MODIFY_EXPR, etype, var, ret);
8583 if (ignore)
8584 return ret;
8585
8586 return omit_one_operand_loc (loc, type, dest, ret);
8587 }
8588
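/* Illustrative sketch, not part of GCC: a constant-length memset of a
   word-sized local folds to a single store by the code above,
   assuming a 32-bit int.  `memset_demo' is hypothetical.  */

static int
memset_demo (void)
{
  int i;
  __builtin_memset (&i, 0xab, sizeof i);	/* -> i = 0xabababab */
  return i;
}
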
8589 /* Fold function call to builtin bzero.  Return
8590 NULL_TREE if no simplification can be made. */
8591
8592 static tree
8593 fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
8594 {
8595 if (! validate_arg (dest, POINTER_TYPE)
8596 || ! validate_arg (size, INTEGER_TYPE))
8597 return NULL_TREE;
8598
8599 if (!ignore)
8600 return NULL_TREE;
8601
8602 /* New argument list transforming bzero(ptr x, int y) to
8603 memset(ptr x, int 0, size_t y). This is done this way
8604 so that if it isn't expanded inline, we fall back to
8605 calling bzero instead of memset. */
8606
8607 return fold_builtin_memset (loc, dest, integer_zero_node,
8608 fold_convert_loc (loc, size_type_node, size),
8609 void_type_node, ignore);
8610 }
8611
8612 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8613 NULL_TREE if no simplification can be made.
8614 If ENDP is 0, return DEST (like memcpy).
8615 If ENDP is 1, return DEST+LEN (like mempcpy).
8616 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8617 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8618 (memmove). */
8619
8620 static tree
8621 fold_builtin_memory_op (location_t loc, tree dest, tree src,
8622 tree len, tree type, bool ignore, int endp)
8623 {
8624 tree destvar, srcvar, expr;
8625
8626 if (! validate_arg (dest, POINTER_TYPE)
8627 || ! validate_arg (src, POINTER_TYPE)
8628 || ! validate_arg (len, INTEGER_TYPE))
8629 return NULL_TREE;
8630
8631 /* If the LEN parameter is zero, return DEST. */
8632 if (integer_zerop (len))
8633 return omit_one_operand_loc (loc, type, dest, src);
8634
8635 /* If SRC and DEST are the same (and not volatile), return
8636 DEST{,+LEN,+LEN-1}. */
8637 if (operand_equal_p (src, dest, 0))
8638 expr = len;
8639 else
8640 {
8641 tree srctype, desttype;
8642 unsigned int src_align, dest_align;
8643 tree off0;
8644
8645 /* Build accesses at offset zero with a ref-all character type. */
8646 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
8647 ptr_mode, true), 0);
8648
8649 /* If we can perform the copy efficiently by first doing all loads
8650 and then all stores, inline it that way.  Currently "efficiently"
8651 means that we can load all the memory into a single integer
8652 register, which is what MOVE_MAX gives us.  */
8653 src_align = get_pointer_alignment (src);
8654 dest_align = get_pointer_alignment (dest);
8655 if (tree_fits_uhwi_p (len)
8656 && compare_tree_int (len, MOVE_MAX) <= 0
8657 /* ??? Don't transform copies from strings with known length;
8658 this confuses tree-ssa-strlen.c. It also doesn't handle
8659 the case in gcc.dg/strlenopt-8.c, which is XFAILed for that
8660 reason. */
8661 && !c_strlen (src, 2))
8662 {
8663 unsigned ilen = tree_to_uhwi (len);
8664 if (exact_log2 (ilen) != -1)
8665 {
8666 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
8667 if (type
8668 && TYPE_MODE (type) != BLKmode
8669 && (GET_MODE_SIZE (TYPE_MODE (type)) * BITS_PER_UNIT
8670 == ilen * 8)
8671 /* If the pointers are not aligned we must be able to
8672 emit an unaligned load. */
8673 && ((src_align >= GET_MODE_ALIGNMENT (TYPE_MODE (type))
8674 && dest_align >= GET_MODE_ALIGNMENT (TYPE_MODE (type)))
8675 || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
8676 MIN (src_align, dest_align))))
8677 {
8678 tree srctype = type;
8679 tree desttype = type;
8680 if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
8681 srctype = build_aligned_type (type, src_align);
8682 if (dest_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
8683 desttype = build_aligned_type (type, dest_align);
8684 if (!ignore)
8685 dest = builtin_save_expr (dest);
8686 expr = build2 (MODIFY_EXPR, type,
8687 fold_build2 (MEM_REF, desttype, dest, off0),
8688 fold_build2 (MEM_REF, srctype, src, off0));
8689 goto done;
8690 }
8691 }
8692 }
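/* For example, on a target where MOVE_MAX is 8, an 8-byte memcpy or
   memmove between sufficiently aligned pointers (or pointers on a
   target with cheap unaligned access) is folded above into a single
   register-sized assignment of the form
     *(uint64-like MEM_REF) dest = *(uint64-like MEM_REF) src;  */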
8693
8694 if (endp == 3)
8695 {
8696 /* Both DEST and SRC must be pointer types.
8697 ??? This is what the old code did. Is testing for pointer types
8698 really mandatory?
8699 
8700 If SRC is readonly or the length is 1, we can use memcpy. */
8701 if (!dest_align || !src_align)
8702 return NULL_TREE;
8703 if (readonly_data_expr (src)
8704 || (tree_fits_uhwi_p (len)
8705 && (MIN (src_align, dest_align) / BITS_PER_UNIT
8706 >= tree_to_uhwi (len))))
8707 {
8708 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8709 if (!fn)
8710 return NULL_TREE;
8711 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8712 }
8713
8714 /* If *src and *dest can't overlap, optimize into memcpy as well. */
8715 if (TREE_CODE (src) == ADDR_EXPR
8716 && TREE_CODE (dest) == ADDR_EXPR)
8717 {
8718 tree src_base, dest_base, fn;
8719 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
8720 HOST_WIDE_INT size = -1;
8721 HOST_WIDE_INT maxsize = -1;
8722
8723 srcvar = TREE_OPERAND (src, 0);
8724 src_base = get_ref_base_and_extent (srcvar, &src_offset,
8725 &size, &maxsize);
8726 destvar = TREE_OPERAND (dest, 0);
8727 dest_base = get_ref_base_and_extent (destvar, &dest_offset,
8728 &size, &maxsize);
8729 if (tree_fits_uhwi_p (len))
8730 maxsize = tree_to_uhwi (len);
8731 else
8732 maxsize = -1;
8733 src_offset /= BITS_PER_UNIT;
8734 dest_offset /= BITS_PER_UNIT;
8735 if (SSA_VAR_P (src_base)
8736 && SSA_VAR_P (dest_base))
8737 {
8738 if (operand_equal_p (src_base, dest_base, 0)
8739 && ranges_overlap_p (src_offset, maxsize,
8740 dest_offset, maxsize))
8741 return NULL_TREE;
8742 }
8743 else if (TREE_CODE (src_base) == MEM_REF
8744 && TREE_CODE (dest_base) == MEM_REF)
8745 {
8746 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
8747 TREE_OPERAND (dest_base, 0), 0))
8748 return NULL_TREE;
8749 offset_int off = mem_ref_offset (src_base) + src_offset;
8750 if (!wi::fits_shwi_p (off))
8751 return NULL_TREE;
8752 src_offset = off.to_shwi ();
8753
8754 off = mem_ref_offset (dest_base) + dest_offset;
8755 if (!wi::fits_shwi_p (off))
8756 return NULL_TREE;
8757 dest_offset = off.to_shwi ();
8758 if (ranges_overlap_p (src_offset, maxsize,
8759 dest_offset, maxsize))
8760 return NULL_TREE;
8761 }
8762 else
8763 return NULL_TREE;
8764
8765 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8766 if (!fn)
8767 return NULL_TREE;
8768 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8769 }
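/* For example, memmove (&buf[8], &buf[0], 4) accesses byte ranges
   [8, 12) and [0, 4) of the same base object; the analysis above
   proves they cannot overlap, so the call safely becomes memcpy. */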
8770
8771 /* If the destination and source do not alias, optimize into
8772 memcpy as well. */
8773 if ((is_gimple_min_invariant (dest)
8774 || TREE_CODE (dest) == SSA_NAME)
8775 && (is_gimple_min_invariant (src)
8776 || TREE_CODE (src) == SSA_NAME))
8777 {
8778 ao_ref destr, srcr;
8779 ao_ref_init_from_ptr_and_size (&destr, dest, len);
8780 ao_ref_init_from_ptr_and_size (&srcr, src, len);
8781 if (!refs_may_alias_p_1 (&destr, &srcr, false))
8782 {
8783 tree fn;
8784 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8785 if (!fn)
8786 return NULL_TREE;
8787 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8788 }
8789 }
8790
8791 return NULL_TREE;
8792 }
8793
8794 if (!tree_fits_shwi_p (len))
8795 return NULL_TREE;
8796 /* FIXME:
8797 This logic loses for arguments like (type *)malloc (sizeof (type)),
8798 since we strip the casts from the (void *) return value of malloc.
8799 Perhaps we ought to inherit the type from a non-VOID argument here? */
8800 STRIP_NOPS (src);
8801 STRIP_NOPS (dest);
8802 if (!POINTER_TYPE_P (TREE_TYPE (src))
8803 || !POINTER_TYPE_P (TREE_TYPE (dest)))
8804 return NULL_TREE;
8805 /* In the following, try to find a type that is most natural to be
8806 used for the memcpy source and destination, and that allows
8807 the most optimization when memcpy is turned into a plain assignment
8808 using that type. In theory we could always use a char[len] type,
8809 but that only gains us that the destination and source possibly
8810 will no longer have their address taken. */
8811 /* As we fold (void *)(p + CST) to (void *)p + CST, undo this here. */
8812 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
8813 {
8814 tree tem = TREE_OPERAND (src, 0);
8815 STRIP_NOPS (tem);
8816 if (tem != TREE_OPERAND (src, 0))
8817 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
8818 }
8819 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
8820 {
8821 tree tem = TREE_OPERAND (dest, 0);
8822 STRIP_NOPS (tem);
8823 if (tem != TREE_OPERAND (dest, 0))
8824 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
8825 }
8826 srctype = TREE_TYPE (TREE_TYPE (src));
8827 if (TREE_CODE (srctype) == ARRAY_TYPE
8828 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8829 {
8830 srctype = TREE_TYPE (srctype);
8831 STRIP_NOPS (src);
8832 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
8833 }
8834 desttype = TREE_TYPE (TREE_TYPE (dest));
8835 if (TREE_CODE (desttype) == ARRAY_TYPE
8836 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8837 {
8838 desttype = TREE_TYPE (desttype);
8839 STRIP_NOPS (dest);
8840 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
8841 }
8842 if (TREE_ADDRESSABLE (srctype)
8843 || TREE_ADDRESSABLE (desttype))
8844 return NULL_TREE;
8845
8846 /* Make sure we are not copying using a floating-point mode or
8847 a type whose size possibly does not match its precision. */
8848 if (FLOAT_MODE_P (TYPE_MODE (desttype))
8849 || TREE_CODE (desttype) == BOOLEAN_TYPE
8850 || TREE_CODE (desttype) == ENUMERAL_TYPE)
8851 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
8852 if (FLOAT_MODE_P (TYPE_MODE (srctype))
8853 || TREE_CODE (srctype) == BOOLEAN_TYPE
8854 || TREE_CODE (srctype) == ENUMERAL_TYPE)
8855 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
8856 if (!srctype)
8857 srctype = desttype;
8858 if (!desttype)
8859 desttype = srctype;
8860 if (!srctype)
8861 return NULL_TREE;
8862
8863 src_align = get_pointer_alignment (src);
8864 dest_align = get_pointer_alignment (dest);
8865 if (dest_align < TYPE_ALIGN (desttype)
8866 || src_align < TYPE_ALIGN (srctype))
8867 return NULL_TREE;
8868
8869 if (!ignore)
8870 dest = builtin_save_expr (dest);
8871
8872 destvar = dest;
8873 STRIP_NOPS (destvar);
8874 if (TREE_CODE (destvar) == ADDR_EXPR
8875 && var_decl_component_p (TREE_OPERAND (destvar, 0))
8876 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8877 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
8878 else
8879 destvar = NULL_TREE;
8880
8881 srcvar = src;
8882 STRIP_NOPS (srcvar);
8883 if (TREE_CODE (srcvar) == ADDR_EXPR
8884 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
8885 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8886 {
8887 if (!destvar
8888 || src_align >= TYPE_ALIGN (desttype))
8889 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
8890 srcvar, off0);
8891 else if (!STRICT_ALIGNMENT)
8892 {
8893 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
8894 src_align);
8895 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
8896 }
8897 else
8898 srcvar = NULL_TREE;
8899 }
8900 else
8901 srcvar = NULL_TREE;
8902
8903 if (srcvar == NULL_TREE && destvar == NULL_TREE)
8904 return NULL_TREE;
8905
8906 if (srcvar == NULL_TREE)
8907 {
8908 STRIP_NOPS (src);
8909 if (src_align >= TYPE_ALIGN (desttype))
8910 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
8911 else
8912 {
8913 if (STRICT_ALIGNMENT)
8914 return NULL_TREE;
8915 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
8916 src_align);
8917 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
8918 }
8919 }
8920 else if (destvar == NULL_TREE)
8921 {
8922 STRIP_NOPS (dest);
8923 if (dest_align >= TYPE_ALIGN (srctype))
8924 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
8925 else
8926 {
8927 if (STRICT_ALIGNMENT)
8928 return NULL_TREE;
8929 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
8930 dest_align);
8931 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
8932 }
8933 }
8934
8935 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, srcvar);
8936 }
8937
8938 done:
8939 if (ignore)
8940 return expr;
8941
8942 if (endp == 0 || endp == 3)
8943 return omit_one_operand_loc (loc, type, dest, expr);
8944
8945 if (expr == len)
8946 expr = NULL_TREE;
8947
8948 if (endp == 2)
8949 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
8950 ssize_int (1));
8951
8952 dest = fold_build_pointer_plus_loc (loc, dest, len);
8953 dest = fold_convert_loc (loc, type, dest);
8954 if (expr)
8955 dest = omit_one_operand_loc (loc, type, dest, expr);
8956 return dest;
8957 }
8958
8959 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8960 If LEN is not NULL, it represents the length of the string to be
8961 copied. Return NULL_TREE if no simplification can be made. */
8962
8963 tree
8964 fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
8965 {
8966 tree fn;
8967
8968 if (!validate_arg (dest, POINTER_TYPE)
8969 || !validate_arg (src, POINTER_TYPE))
8970 return NULL_TREE;
8971
8972 /* If SRC and DEST are the same (and not volatile), return DEST. */
8973 if (operand_equal_p (src, dest, 0))
8974 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
8975
8976 if (optimize_function_for_size_p (cfun))
8977 return NULL_TREE;
8978
8979 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8980 if (!fn)
8981 return NULL_TREE;
8982
8983 if (!len)
8984 {
8985 len = c_strlen (src, 1);
8986 if (! len || TREE_SIDE_EFFECTS (len))
8987 return NULL_TREE;
8988 }
8989
8990 len = fold_convert_loc (loc, size_type_node, len);
8991 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
8992 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8993 build_call_expr_loc (loc, fn, 3, dest, src, len));
8994 }
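/* For example, strcpy (dest, "hi") is folded to memcpy (dest, "hi", 3);
   the length is strlen + 1, so the terminating nul is copied too. */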
8995
8996 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
8997 Return NULL_TREE if no simplification can be made. */
8998
8999 static tree
9000 fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
9001 {
9002 tree fn, len, lenp1, call, type;
9003
9004 if (!validate_arg (dest, POINTER_TYPE)
9005 || !validate_arg (src, POINTER_TYPE))
9006 return NULL_TREE;
9007
9008 len = c_strlen (src, 1);
9009 if (!len
9010 || TREE_CODE (len) != INTEGER_CST)
9011 return NULL_TREE;
9012
9013 if (optimize_function_for_size_p (cfun)
9014 /* If length is zero it's small enough. */
9015 && !integer_zerop (len))
9016 return NULL_TREE;
9017
9018 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
9019 if (!fn)
9020 return NULL_TREE;
9021
9022 lenp1 = size_binop_loc (loc, PLUS_EXPR,
9023 fold_convert_loc (loc, size_type_node, len),
9024 build_int_cst (size_type_node, 1));
9025 /* We use dest twice in building our expression. Save it from
9026 multiple expansions. */
9027 dest = builtin_save_expr (dest);
9028 call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);
9029
9030 type = TREE_TYPE (TREE_TYPE (fndecl));
9031 dest = fold_build_pointer_plus_loc (loc, dest, len);
9032 dest = fold_convert_loc (loc, type, dest);
9033 dest = omit_one_operand_loc (loc, type, dest, call);
9034 return dest;
9035 }
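/* For example, stpcpy (dest, "abc") becomes, in effect,
   (memcpy (dest, "abc", 4), dest + 3): all four bytes including the
   nul are copied, and the result points at the copied nul byte. */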
9036
9037 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
9038 If SLEN is not NULL, it represents the length of the source string.
9039 Return NULL_TREE if no simplification can be made. */
9040
9041 tree
9042 fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
9043 tree src, tree len, tree slen)
9044 {
9045 tree fn;
9046
9047 if (!validate_arg (dest, POINTER_TYPE)
9048 || !validate_arg (src, POINTER_TYPE)
9049 || !validate_arg (len, INTEGER_TYPE))
9050 return NULL_TREE;
9051
9052 /* If the LEN parameter is zero, return DEST. */
9053 if (integer_zerop (len))
9054 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
9055
9056 /* We can't compare slen with len as constants below if len is not a
9057 constant. */
9058 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
9059 return NULL_TREE;
9060
9061 if (!slen)
9062 slen = c_strlen (src, 1);
9063
9064 /* Now, we must have a constant length for the source string. */
9065 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
9066 return NULL_TREE;
9067
9068 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
9069
9070 /* We do not support simplification of this case, though we do
9071 support it when expanding trees into RTL. */
9072 /* FIXME: generate a call to __builtin_memset. */
9073 if (tree_int_cst_lt (slen, len))
9074 return NULL_TREE;
9075
9076 /* OK, transform into builtin memcpy. */
9077 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
9078 if (!fn)
9079 return NULL_TREE;
9080
9081 len = fold_convert_loc (loc, size_type_node, len);
9082 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
9083 build_call_expr_loc (loc, fn, 3, dest, src, len));
9084 }
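/* For example, strncpy (dest, "ab", 3) is folded to
   memcpy (dest, "ab", 3); since the source length including its nul
   is not smaller than LEN, no additional zero padding is needed. */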
9085
9086 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
9087 arguments to the call, and TYPE is its return type.
9088 Return NULL_TREE if no simplification can be made. */
9089
9090 static tree
9091 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
9092 {
9093 if (!validate_arg (arg1, POINTER_TYPE)
9094 || !validate_arg (arg2, INTEGER_TYPE)
9095 || !validate_arg (len, INTEGER_TYPE))
9096 return NULL_TREE;
9097 else
9098 {
9099 const char *p1;
9100
9101 if (TREE_CODE (arg2) != INTEGER_CST
9102 || !tree_fits_uhwi_p (len))
9103 return NULL_TREE;
9104
9105 p1 = c_getstr (arg1);
9106 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
9107 {
9108 char c;
9109 const char *r;
9110 tree tem;
9111
9112 if (target_char_cast (arg2, &c))
9113 return NULL_TREE;
9114
9115 r = (const char *) memchr (p1, c, tree_to_uhwi (len));
9116
9117 if (r == NULL)
9118 return build_int_cst (TREE_TYPE (arg1), 0);
9119
9120 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
9121 return fold_convert_loc (loc, type, tem);
9122 }
9123 return NULL_TREE;
9124 }
9125 }
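/* For example, memchr ("hello", 'l', 5) is evaluated at compile time
   to the constant "hello" + 2, while memchr ("hello", 'z', 5) folds
   to a null pointer. */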
9126
9127 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
9128 Return NULL_TREE if no simplification can be made. */
9129
9130 static tree
9131 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
9132 {
9133 const char *p1, *p2;
9134
9135 if (!validate_arg (arg1, POINTER_TYPE)
9136 || !validate_arg (arg2, POINTER_TYPE)
9137 || !validate_arg (len, INTEGER_TYPE))
9138 return NULL_TREE;
9139
9140 /* If the LEN parameter is zero, return zero. */
9141 if (integer_zerop (len))
9142 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
9143 arg1, arg2);
9144
9145 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9146 if (operand_equal_p (arg1, arg2, 0))
9147 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
9148
9149 p1 = c_getstr (arg1);
9150 p2 = c_getstr (arg2);
9151
9152 /* If all arguments are constant, and the value of len is not greater
9153 than the lengths of arg1 and arg2, evaluate at compile-time. */
9154 if (tree_fits_uhwi_p (len) && p1 && p2
9155 && compare_tree_int (len, strlen (p1) + 1) <= 0
9156 && compare_tree_int (len, strlen (p2) + 1) <= 0)
9157 {
9158 const int r = memcmp (p1, p2, tree_to_uhwi (len));
9159
9160 if (r > 0)
9161 return integer_one_node;
9162 else if (r < 0)
9163 return integer_minus_one_node;
9164 else
9165 return integer_zero_node;
9166 }
9167
9168 /* If the len parameter is one, return an expression corresponding to
9169 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
9170 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
9171 {
9172 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9173 tree cst_uchar_ptr_node
9174 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9175
9176 tree ind1
9177 = fold_convert_loc (loc, integer_type_node,
9178 build1 (INDIRECT_REF, cst_uchar_node,
9179 fold_convert_loc (loc,
9180 cst_uchar_ptr_node,
9181 arg1)));
9182 tree ind2
9183 = fold_convert_loc (loc, integer_type_node,
9184 build1 (INDIRECT_REF, cst_uchar_node,
9185 fold_convert_loc (loc,
9186 cst_uchar_ptr_node,
9187 arg2)));
9188 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9189 }
9190
9191 return NULL_TREE;
9192 }
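/* For example, memcmp ("abc", "abd", 3) is evaluated at compile time
   to -1, and memcmp (p, q, 1) becomes the byte difference
   *(const unsigned char *) p - *(const unsigned char *) q. */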
9193
9194 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
9195 Return NULL_TREE if no simplification can be made. */
9196
9197 static tree
9198 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
9199 {
9200 const char *p1, *p2;
9201
9202 if (!validate_arg (arg1, POINTER_TYPE)
9203 || !validate_arg (arg2, POINTER_TYPE))
9204 return NULL_TREE;
9205
9206 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9207 if (operand_equal_p (arg1, arg2, 0))
9208 return integer_zero_node;
9209
9210 p1 = c_getstr (arg1);
9211 p2 = c_getstr (arg2);
9212
9213 if (p1 && p2)
9214 {
9215 const int i = strcmp (p1, p2);
9216 if (i < 0)
9217 return integer_minus_one_node;
9218 else if (i > 0)
9219 return integer_one_node;
9220 else
9221 return integer_zero_node;
9222 }
9223
9224 /* If the second arg is "", return *(const unsigned char*)arg1. */
9225 if (p2 && *p2 == '\0')
9226 {
9227 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9228 tree cst_uchar_ptr_node
9229 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9230
9231 return fold_convert_loc (loc, integer_type_node,
9232 build1 (INDIRECT_REF, cst_uchar_node,
9233 fold_convert_loc (loc,
9234 cst_uchar_ptr_node,
9235 arg1)));
9236 }
9237
9238 /* If the first arg is "", return -*(const unsigned char*)arg2. */
9239 if (p1 && *p1 == '\0')
9240 {
9241 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9242 tree cst_uchar_ptr_node
9243 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9244
9245 tree temp
9246 = fold_convert_loc (loc, integer_type_node,
9247 build1 (INDIRECT_REF, cst_uchar_node,
9248 fold_convert_loc (loc,
9249 cst_uchar_ptr_node,
9250 arg2)));
9251 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
9252 }
9253
9254 return NULL_TREE;
9255 }
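/* For example, strcmp (s, "") reduces to *(const unsigned char *) s,
   which is zero exactly when S is the empty string and positive
   otherwise, matching strcmp's contract. */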
9256
9257 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
9258 Return NULL_TREE if no simplification can be made. */
9259
9260 static tree
9261 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
9262 {
9263 const char *p1, *p2;
9264
9265 if (!validate_arg (arg1, POINTER_TYPE)
9266 || !validate_arg (arg2, POINTER_TYPE)
9267 || !validate_arg (len, INTEGER_TYPE))
9268 return NULL_TREE;
9269
9270 /* If the LEN parameter is zero, return zero. */
9271 if (integer_zerop (len))
9272 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
9273 arg1, arg2);
9274
9275 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9276 if (operand_equal_p (arg1, arg2, 0))
9277 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
9278
9279 p1 = c_getstr (arg1);
9280 p2 = c_getstr (arg2);
9281
9282 if (tree_fits_uhwi_p (len) && p1 && p2)
9283 {
9284 const int i = strncmp (p1, p2, tree_to_uhwi (len));
9285 if (i > 0)
9286 return integer_one_node;
9287 else if (i < 0)
9288 return integer_minus_one_node;
9289 else
9290 return integer_zero_node;
9291 }
9292
9293 /* If the second arg is "", and the length is greater than zero,
9294 return *(const unsigned char*)arg1. */
9295 if (p2 && *p2 == '\0'
9296 && TREE_CODE (len) == INTEGER_CST
9297 && tree_int_cst_sgn (len) == 1)
9298 {
9299 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9300 tree cst_uchar_ptr_node
9301 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9302
9303 return fold_convert_loc (loc, integer_type_node,
9304 build1 (INDIRECT_REF, cst_uchar_node,
9305 fold_convert_loc (loc,
9306 cst_uchar_ptr_node,
9307 arg1)));
9308 }
9309
9310 /* If the first arg is "", and the length is greater than zero,
9311 return -*(const unsigned char*)arg2. */
9312 if (p1 && *p1 == '\0'
9313 && TREE_CODE (len) == INTEGER_CST
9314 && tree_int_cst_sgn (len) == 1)
9315 {
9316 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9317 tree cst_uchar_ptr_node
9318 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9319
9320 tree temp = fold_convert_loc (loc, integer_type_node,
9321 build1 (INDIRECT_REF, cst_uchar_node,
9322 fold_convert_loc (loc,
9323 cst_uchar_ptr_node,
9324 arg2)));
9325 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
9326 }
9327
9328 /* If the len parameter is one, return an expression corresponding to
9329 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
9330 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
9331 {
9332 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9333 tree cst_uchar_ptr_node
9334 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9335
9336 tree ind1 = fold_convert_loc (loc, integer_type_node,
9337 build1 (INDIRECT_REF, cst_uchar_node,
9338 fold_convert_loc (loc,
9339 cst_uchar_ptr_node,
9340 arg1)));
9341 tree ind2 = fold_convert_loc (loc, integer_type_node,
9342 build1 (INDIRECT_REF, cst_uchar_node,
9343 fold_convert_loc (loc,
9344 cst_uchar_ptr_node,
9345 arg2)));
9346 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9347 }
9348
9349 return NULL_TREE;
9350 }
9351
9352 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9353 ARG. Return NULL_TREE if no simplification can be made. */
9354
9355 static tree
9356 fold_builtin_signbit (location_t loc, tree arg, tree type)
9357 {
9358 if (!validate_arg (arg, REAL_TYPE))
9359 return NULL_TREE;
9360
9361 /* If ARG is a compile-time constant, determine the result. */
9362 if (TREE_CODE (arg) == REAL_CST
9363 && !TREE_OVERFLOW (arg))
9364 {
9365 REAL_VALUE_TYPE c;
9366
9367 c = TREE_REAL_CST (arg);
9368 return (REAL_VALUE_NEGATIVE (c)
9369 ? build_one_cst (type)
9370 : build_zero_cst (type));
9371 }
9372
9373 /* If ARG is non-negative, the result is always zero. */
9374 if (tree_expr_nonnegative_p (arg))
9375 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9376
9377 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9378 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9379 return fold_convert (type,
9380 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
9381 build_real (TREE_TYPE (arg), dconst0)));
9382
9383 return NULL_TREE;
9384 }
9385
9386 /* Fold function call to builtin copysign, copysignf or copysignl with
9387 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9388 be made. */
9389
9390 static tree
9391 fold_builtin_copysign (location_t loc, tree fndecl,
9392 tree arg1, tree arg2, tree type)
9393 {
9394 tree tem;
9395
9396 if (!validate_arg (arg1, REAL_TYPE)
9397 || !validate_arg (arg2, REAL_TYPE))
9398 return NULL_TREE;
9399
9400 /* copysign(X,X) is X. */
9401 if (operand_equal_p (arg1, arg2, 0))
9402 return fold_convert_loc (loc, type, arg1);
9403
9404 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9405 if (TREE_CODE (arg1) == REAL_CST
9406 && TREE_CODE (arg2) == REAL_CST
9407 && !TREE_OVERFLOW (arg1)
9408 && !TREE_OVERFLOW (arg2))
9409 {
9410 REAL_VALUE_TYPE c1, c2;
9411
9412 c1 = TREE_REAL_CST (arg1);
9413 c2 = TREE_REAL_CST (arg2);
9414 /* c1.sign := c2.sign. */
9415 real_copysign (&c1, &c2);
9416 return build_real (type, c1);
9417 }
9418
9419 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9420 Remember to evaluate Y for side-effects. */
9421 if (tree_expr_nonnegative_p (arg2))
9422 return omit_one_operand_loc (loc, type,
9423 fold_build1_loc (loc, ABS_EXPR, type, arg1),
9424 arg2);
9425
9426 /* Strip sign changing operations for the first argument. */
9427 tem = fold_strip_sign_ops (arg1);
9428 if (tem)
9429 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
9430
9431 return NULL_TREE;
9432 }
9433
9434 /* Fold a call to builtin isascii with argument ARG. */
9435
9436 static tree
9437 fold_builtin_isascii (location_t loc, tree arg)
9438 {
9439 if (!validate_arg (arg, INTEGER_TYPE))
9440 return NULL_TREE;
9441 else
9442 {
9443 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9444 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9445 build_int_cst (integer_type_node,
9446 ~ (unsigned HOST_WIDE_INT) 0x7f));
9447 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9448 arg, integer_zero_node);
9449 }
9450 }
9451
9452 /* Fold a call to builtin toascii with argument ARG. */
9453
9454 static tree
9455 fold_builtin_toascii (location_t loc, tree arg)
9456 {
9457 if (!validate_arg (arg, INTEGER_TYPE))
9458 return NULL_TREE;
9459
9460 /* Transform toascii(c) -> (c & 0x7f). */
9461 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9462 build_int_cst (integer_type_node, 0x7f));
9463 }
9464
9465 /* Fold a call to builtin isdigit with argument ARG. */
9466
9467 static tree
9468 fold_builtin_isdigit (location_t loc, tree arg)
9469 {
9470 if (!validate_arg (arg, INTEGER_TYPE))
9471 return NULL_TREE;
9472 else
9473 {
9474 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9475 /* According to the C standard, isdigit is unaffected by locale.
9476 However, it definitely is affected by the target character set. */
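/* The unsigned subtraction makes characters below '0' wrap around to
   large values, so the single <= 9 comparison checks both ends of
   the digit range at once. */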
9477 unsigned HOST_WIDE_INT target_digit0
9478 = lang_hooks.to_target_charset ('0');
9479
9480 if (target_digit0 == 0)
9481 return NULL_TREE;
9482
9483 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9484 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9485 build_int_cst (unsigned_type_node, target_digit0));
9486 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9487 build_int_cst (unsigned_type_node, 9));
9488 }
9489 }
9490
9491 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9492
9493 static tree
9494 fold_builtin_fabs (location_t loc, tree arg, tree type)
9495 {
9496 if (!validate_arg (arg, REAL_TYPE))
9497 return NULL_TREE;
9498
9499 arg = fold_convert_loc (loc, type, arg);
9500 if (TREE_CODE (arg) == REAL_CST)
9501 return fold_abs_const (arg, type);
9502 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9503 }
9504
9505 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9506
9507 static tree
9508 fold_builtin_abs (location_t loc, tree arg, tree type)
9509 {
9510 if (!validate_arg (arg, INTEGER_TYPE))
9511 return NULL_TREE;
9512
9513 arg = fold_convert_loc (loc, type, arg);
9514 if (TREE_CODE (arg) == INTEGER_CST)
9515 return fold_abs_const (arg, type);
9516 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9517 }
9518
9519 /* Fold a fma operation with arguments ARG[012]. */
9520
9521 tree
9522 fold_fma (location_t loc ATTRIBUTE_UNUSED,
9523 tree type, tree arg0, tree arg1, tree arg2)
9524 {
9525 if (TREE_CODE (arg0) == REAL_CST
9526 && TREE_CODE (arg1) == REAL_CST
9527 && TREE_CODE (arg2) == REAL_CST)
9528 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
9529
9530 return NULL_TREE;
9531 }
9532
9533 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
9534
9535 static tree
9536 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
9537 {
9538 if (validate_arg (arg0, REAL_TYPE)
9539 && validate_arg (arg1, REAL_TYPE)
9540 && validate_arg (arg2, REAL_TYPE))
9541 {
9542 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
9543 if (tem)
9544 return tem;
9545
9546 /* ??? Only expand to FMA_EXPR if it's directly supported. */
9547 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
9548 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
9549 }
9550 return NULL_TREE;
9551 }
9552
9553 /* Fold a call to builtin fmin or fmax. */
9554
9555 static tree
9556 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
9557 tree type, bool max)
9558 {
9559 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9560 {
9561 /* Calculate the result when the argument is a constant. */
9562 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9563
9564 if (res)
9565 return res;
9566
9567 /* If either argument is NaN, return the other one. Avoid the
9568 transformation if we get (and honor) a signalling NaN. Using
9569 omit_one_operand() ensures we create a non-lvalue. */
9570 if (TREE_CODE (arg0) == REAL_CST
9571 && real_isnan (&TREE_REAL_CST (arg0))
9572 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9573 || ! TREE_REAL_CST (arg0).signalling))
9574 return omit_one_operand_loc (loc, type, arg1, arg0);
9575 if (TREE_CODE (arg1) == REAL_CST
9576 && real_isnan (&TREE_REAL_CST (arg1))
9577 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9578 || ! TREE_REAL_CST (arg1).signalling))
9579 return omit_one_operand_loc (loc, type, arg0, arg1);
9580
9581 /* Transform fmin/fmax(x,x) -> x. */
9582 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9583 return omit_one_operand_loc (loc, type, arg0, arg1);
9584
9585 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9586 functions to return the numeric arg if the other one is NaN.
9587 These tree codes don't honor that, so only transform if
9588 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9589 handled, so we don't have to worry about it either. */
9590 if (flag_finite_math_only)
9591 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9592 fold_convert_loc (loc, type, arg0),
9593 fold_convert_loc (loc, type, arg1));
9594 }
9595 return NULL_TREE;
9596 }
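/* For example, fmax (x, __builtin_nan ("")) simplifies to x (the NaN
   here is a quiet one), and with -ffinite-math-only fmax (a, b)
   becomes MAX_EXPR <a, b>. */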
9597
9598 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9599
9600 static tree
9601 fold_builtin_carg (location_t loc, tree arg, tree type)
9602 {
9603 if (validate_arg (arg, COMPLEX_TYPE)
9604 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9605 {
9606 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9607
9608 if (atan2_fn)
9609 {
9610 tree new_arg = builtin_save_expr (arg);
9611 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9612 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9613 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9614 }
9615 }
9616
9617 return NULL_TREE;
9618 }
9619
9620 /* Fold a call to builtin logb/ilogb. */
9621
9622 static tree
9623 fold_builtin_logb (location_t loc, tree arg, tree rettype)
9624 {
9625 if (! validate_arg (arg, REAL_TYPE))
9626 return NULL_TREE;
9627
9628 STRIP_NOPS (arg);
9629
9630 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9631 {
9632 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9633
9634 switch (value->cl)
9635 {
9636 case rvc_nan:
9637 case rvc_inf:
9638 /* If arg is Inf or NaN and we're logb, return it. */
9639 if (TREE_CODE (rettype) == REAL_TYPE)
9640 {
9641 /* For logb(-Inf) we have to return +Inf. */
9642 if (real_isinf (value) && real_isneg (value))
9643 {
9644 REAL_VALUE_TYPE tem;
9645 real_inf (&tem);
9646 return build_real (rettype, tem);
9647 }
9648 return fold_convert_loc (loc, rettype, arg);
9649 }
9650 /* Fall through... */
9651 case rvc_zero:
9652 /* Zero may set errno and/or raise an exception for logb; also,
9653 for ilogb we don't know FP_ILOGB0. */
9654 return NULL_TREE;
9655 case rvc_normal:
9656 /* For normal numbers, proceed iff radix == 2. In GCC,
9657 normalized significands are in the range [0.5, 1.0). We
9658 want the exponent as if they were [1.0, 2.0), so get the
9659 exponent and subtract 1. */
9660 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9661 return fold_convert_loc (loc, rettype,
9662 build_int_cst (integer_type_node,
9663 REAL_EXP (value)-1));
9664 break;
9665 }
9666 }
9667
9668 return NULL_TREE;
9669 }
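/* For example, logb (8.0) folds to 3.0: GCC normalizes 8.0 as
   0.5 * 2**4, so REAL_EXP is 4 and the result is 4 - 1. */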
9670
9671 /* Fold a call to builtin significand, if radix == 2. */
9672
9673 static tree
9674 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9675 {
9676 if (! validate_arg (arg, REAL_TYPE))
9677 return NULL_TREE;
9678
9679 STRIP_NOPS (arg);
9680
9681 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9682 {
9683 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9684
9685 switch (value->cl)
9686 {
9687 case rvc_zero:
9688 case rvc_nan:
9689 case rvc_inf:
9690 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9691 return fold_convert_loc (loc, rettype, arg);
9692 case rvc_normal:
9693 /* For normal numbers, proceed iff radix == 2. */
9694 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9695 {
9696 REAL_VALUE_TYPE result = *value;
9697 /* In GCC, normalized significands are in the range [0.5,
9698 1.0). We want them to be [1.0, 2.0) so set the
9699 exponent to 1. */
9700 SET_REAL_EXP (&result, 1);
9701 return build_real (rettype, result);
9702 }
9703 break;
9704 }
9705 }
9706
9707 return NULL_TREE;
9708 }
9709
9710 /* Fold a call to builtin frexp, we can assume the base is 2. */
9711
9712 static tree
9713 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9714 {
9715 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9716 return NULL_TREE;
9717
9718 STRIP_NOPS (arg0);
9719
9720 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9721 return NULL_TREE;
9722
9723 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9724
9725 /* Proceed if a valid pointer type was passed in. */
9726 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9727 {
9728 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9729 tree frac, exp;
9730
9731 switch (value->cl)
9732 {
9733 case rvc_zero:
9734 /* For +-0, return (*exp = 0, +-0). */
9735 exp = integer_zero_node;
9736 frac = arg0;
9737 break;
9738 case rvc_nan:
9739 case rvc_inf:
9740 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9741 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9742 case rvc_normal:
9743 {
9744 /* Since the frexp function always expects base 2, and in
9745 GCC normalized significands are already in the range
9746 [0.5, 1.0), we have exactly what frexp wants. */
9747 REAL_VALUE_TYPE frac_rvt = *value;
9748 SET_REAL_EXP (&frac_rvt, 0);
9749 frac = build_real (rettype, frac_rvt);
9750 exp = build_int_cst (integer_type_node, REAL_EXP (value));
9751 }
9752 break;
9753 default:
9754 gcc_unreachable ();
9755 }
9756
9757 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9758 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9759 TREE_SIDE_EFFECTS (arg1) = 1;
9760 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9761 }
9762
9763 return NULL_TREE;
9764 }
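/* For example, frexp (8.0, &e) folds to (*e = 4, 0.5): 8.0 is
   represented as 0.5 * 2**4, and GCC's normalized significands
   already lie in frexp's expected range [0.5, 1.0). */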
9765
9766 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9767 then we can assume the base is two. If it's false, then we have to
9768 check the mode of the TYPE parameter in certain cases. */
9769
9770 static tree
9771 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9772 tree type, bool ldexp)
9773 {
9774 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9775 {
9776 STRIP_NOPS (arg0);
9777 STRIP_NOPS (arg1);
9778
9779 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9780 if (real_zerop (arg0) || integer_zerop (arg1)
9781 || (TREE_CODE (arg0) == REAL_CST
9782 && !real_isfinite (&TREE_REAL_CST (arg0))))
9783 return omit_one_operand_loc (loc, type, arg0, arg1);
9784
9785 /* If both arguments are constant, then try to evaluate it. */
9786 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9787 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9788 && tree_fits_shwi_p (arg1))
9789 {
9790 /* Bound the maximum adjustment to twice the range of the
9791 mode's valid exponents. Use abs to ensure the range is
9792 positive as a sanity check. */
9793 const long max_exp_adj = 2 *
9794 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9795 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9796
9797 /* Get the user-requested adjustment. */
9798 const HOST_WIDE_INT req_exp_adj = tree_to_shwi (arg1);
9799
9800 /* The requested adjustment must be inside this range. This
9801 is a preliminary cap to avoid things like overflow, we
9802 may still fail to compute the result for other reasons. */
9803 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9804 {
9805 REAL_VALUE_TYPE initial_result;
9806
9807 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9808
9809 /* Ensure we didn't overflow. */
9810 if (! real_isinf (&initial_result))
9811 {
9812 const REAL_VALUE_TYPE trunc_result
9813 = real_value_truncate (TYPE_MODE (type), initial_result);
9814
9815 /* Only proceed if the target mode can hold the
9816 resulting value. */
9817 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9818 return build_real (type, trunc_result);
9819 }
9820 }
9821 }
9822 }
9823
9824 return NULL_TREE;
9825 }
9826
9827 /* Fold a call to builtin modf. */
9828
9829 static tree
9830 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9831 {
9832 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9833 return NULL_TREE;
9834
9835 STRIP_NOPS (arg0);
9836
9837 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9838 return NULL_TREE;
9839
9840 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9841
9842 /* Proceed if a valid pointer type was passed in. */
9843 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9844 {
9845 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9846 REAL_VALUE_TYPE trunc, frac;
9847
9848 switch (value->cl)
9849 {
9850 case rvc_nan:
9851 case rvc_zero:
9852 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9853 trunc = frac = *value;
9854 break;
9855 case rvc_inf:
9856 /* For +-Inf, return (*arg1 = arg0, +-0). */
9857 frac = dconst0;
9858 frac.sign = value->sign;
9859 trunc = *value;
9860 break;
9861 case rvc_normal:
9862 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9863 real_trunc (&trunc, VOIDmode, value);
9864 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9865 /* If the original number was negative and already
9866 integral, then the fractional part is -0.0. */
9867 if (value->sign && frac.cl == rvc_zero)
9868 frac.sign = value->sign;
9869 break;
9870 }
9871
9872 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9873 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9874 build_real (rettype, trunc));
9875 TREE_SIDE_EFFECTS (arg1) = 1;
9876 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9877 build_real (rettype, frac));
9878 }
9879
9880 return NULL_TREE;
9881 }
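/* For example, modf (2.5, &iptr) folds to (*iptr = 2.0, 0.5), and
   modf (-2.0, &iptr) to (*iptr = -2.0, -0.0), preserving the sign on
   the zero fractional part. */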
9882
9883 /* Given a location LOC, an interclass builtin function decl FNDECL
9884 and its single argument ARG, return a folded expression computing
9885 the same, or NULL_TREE if we either couldn't or didn't want to fold
9886 (the latter happens if there's an RTL instruction available). */
9887
9888 static tree
9889 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9890 {
9891 enum machine_mode mode;
9892
9893 if (!validate_arg (arg, REAL_TYPE))
9894 return NULL_TREE;
9895
9896 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9897 return NULL_TREE;
9898
9899 mode = TYPE_MODE (TREE_TYPE (arg));
9900
9901 /* If there is no optab, try generic code. */
9902 switch (DECL_FUNCTION_CODE (fndecl))
9903 {
9904 tree result;
9905
9906 CASE_FLT_FN (BUILT_IN_ISINF):
9907 {
9908 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9909 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9910 tree const type = TREE_TYPE (arg);
9911 REAL_VALUE_TYPE r;
9912 char buf[128];
9913
9914 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9915 real_from_string (&r, buf);
9916 result = build_call_expr (isgr_fn, 2,
9917 fold_build1_loc (loc, ABS_EXPR, type, arg),
9918 build_real (type, r));
9919 return result;
9920 }
9921 CASE_FLT_FN (BUILT_IN_FINITE):
9922 case BUILT_IN_ISFINITE:
9923 {
9924 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9925 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9926 tree const type = TREE_TYPE (arg);
9927 REAL_VALUE_TYPE r;
9928 char buf[128];
9929
9930 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9931 real_from_string (&r, buf);
9932 result = build_call_expr (isle_fn, 2,
9933 fold_build1_loc (loc, ABS_EXPR, type, arg),
9934 build_real (type, r));
9935 /*result = fold_build2_loc (loc, UNGT_EXPR,
9936 TREE_TYPE (TREE_TYPE (fndecl)),
9937 fold_build1_loc (loc, ABS_EXPR, type, arg),
9938 build_real (type, r));
9939 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9940 TREE_TYPE (TREE_TYPE (fndecl)),
9941 result);*/
9942 return result;
9943 }
9944 case BUILT_IN_ISNORMAL:
9945 {
9946 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9947 islessequal(fabs(x),DBL_MAX). */
9948 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9949 tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
9950 tree const type = TREE_TYPE (arg);
9951 REAL_VALUE_TYPE rmax, rmin;
9952 char buf[128];
9953
9954 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9955 real_from_string (&rmax, buf);
9956 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9957 real_from_string (&rmin, buf);
9958 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9959 result = build_call_expr (isle_fn, 2, arg,
9960 build_real (type, rmax));
9961 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9962 build_call_expr (isge_fn, 2, arg,
9963 build_real (type, rmin)));
9964 return result;
9965 }
9966 default:
9967 break;
9968 }
9969
9970 return NULL_TREE;
9971 }
9972
9973 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9974 ARG is the argument for the call. */
9975
9976 static tree
9977 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9978 {
9979 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9980 REAL_VALUE_TYPE r;
9981
9982 if (!validate_arg (arg, REAL_TYPE))
9983 return NULL_TREE;
9984
9985 switch (builtin_index)
9986 {
9987 case BUILT_IN_ISINF:
9988 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9989 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9990
9991 if (TREE_CODE (arg) == REAL_CST)
9992 {
9993 r = TREE_REAL_CST (arg);
9994 if (real_isinf (&r))
9995 return real_compare (GT_EXPR, &r, &dconst0)
9996 ? integer_one_node : integer_minus_one_node;
9997 else
9998 return integer_zero_node;
9999 }
10000
10001 return NULL_TREE;
10002
10003 case BUILT_IN_ISINF_SIGN:
10004 {
10005 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
10006 /* In a boolean context, GCC will fold the inner COND_EXPR to
10007 1. So e.g. "if (isinf_sign(x))" would be folded to just
10008 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
10009 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
10010 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
10011 tree tmp = NULL_TREE;
10012
10013 arg = builtin_save_expr (arg);
10014
10015 if (signbit_fn && isinf_fn)
10016 {
10017 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
10018 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
10019
10020 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
10021 signbit_call, integer_zero_node);
10022 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
10023 isinf_call, integer_zero_node);
10024
10025 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
10026 integer_minus_one_node, integer_one_node);
10027 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
10028 isinf_call, tmp,
10029 integer_zero_node);
10030 }
10031
10032 return tmp;
10033 }
10034
10035 case BUILT_IN_ISFINITE:
10036 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
10037 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
10038 return omit_one_operand_loc (loc, type, integer_one_node, arg);
10039
10040 if (TREE_CODE (arg) == REAL_CST)
10041 {
10042 r = TREE_REAL_CST (arg);
10043 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
10044 }
10045
10046 return NULL_TREE;
10047
10048 case BUILT_IN_ISNAN:
10049 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
10050 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
10051
10052 if (TREE_CODE (arg) == REAL_CST)
10053 {
10054 r = TREE_REAL_CST (arg);
10055 return real_isnan (&r) ? integer_one_node : integer_zero_node;
10056 }
10057
10058 arg = builtin_save_expr (arg);
10059 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
10060
10061 default:
10062 gcc_unreachable ();
10063 }
10064 }
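/* For example, with NaNs honored a non-constant isnan (x) lowers to
   the self-comparison x UNORDERED x, which is true exactly when x is
   a NaN. */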
10065
10066 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
10067 This builtin will generate code to return the appropriate floating
10068 point classification depending on the value of the floating point
10069 number passed in. The possible return values must be supplied as
10070 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
10071 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
10072 one floating point argument, which is "type generic". */
10073
10074 static tree
10075 fold_builtin_fpclassify (location_t loc, tree exp)
10076 {
10077 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
10078 arg, type, res, tmp;
10079 enum machine_mode mode;
10080 REAL_VALUE_TYPE r;
10081 char buf[128];
10082
10083 /* Verify the required arguments in the original call. */
10084 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
10085 INTEGER_TYPE, INTEGER_TYPE,
10086 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
10087 return NULL_TREE;
10088
10089 fp_nan = CALL_EXPR_ARG (exp, 0);
10090 fp_infinite = CALL_EXPR_ARG (exp, 1);
10091 fp_normal = CALL_EXPR_ARG (exp, 2);
10092 fp_subnormal = CALL_EXPR_ARG (exp, 3);
10093 fp_zero = CALL_EXPR_ARG (exp, 4);
10094 arg = CALL_EXPR_ARG (exp, 5);
10095 type = TREE_TYPE (arg);
10096 mode = TYPE_MODE (type);
10097 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
10098
10099 /* fpclassify(x) ->
10100 isnan(x) ? FP_NAN :
10101 (fabs(x) == Inf ? FP_INFINITE :
10102 (fabs(x) >= DBL_MIN ? FP_NORMAL :
10103 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
10104
10105 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
10106 build_real (type, dconst0));
10107 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
10108 tmp, fp_zero, fp_subnormal);
10109
10110 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
10111 real_from_string (&r, buf);
10112 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
10113 arg, build_real (type, r));
10114 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
10115
10116 if (HONOR_INFINITIES (mode))
10117 {
10118 real_inf (&r);
10119 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
10120 build_real (type, r));
10121 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
10122 fp_infinite, res);
10123 }
10124
10125 if (HONOR_NANS (mode))
10126 {
10127 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
10128 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
10129 }
10130
10131 return res;
10132 }
10133
10134 /* Fold a call to an unordered comparison function such as
10135 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
10136 being called and ARG0 and ARG1 are the arguments for the call.
10137 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
10138 the opposite of the desired result. UNORDERED_CODE is used
10139 for modes that can hold NaNs and ORDERED_CODE is used for
10140 the rest. */
10141
10142 static tree
10143 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
10144 enum tree_code unordered_code,
10145 enum tree_code ordered_code)
10146 {
10147 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10148 enum tree_code code;
10149 tree type0, type1;
10150 enum tree_code code0, code1;
10151 tree cmp_type = NULL_TREE;
10152
10153 type0 = TREE_TYPE (arg0);
10154 type1 = TREE_TYPE (arg1);
10155
10156 code0 = TREE_CODE (type0);
10157 code1 = TREE_CODE (type1);
10158
10159 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
10160 /* Choose the wider of two real types. */
10161 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
10162 ? type0 : type1;
10163 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
10164 cmp_type = type0;
10165 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
10166 cmp_type = type1;
10167
10168 arg0 = fold_convert_loc (loc, cmp_type, arg0);
10169 arg1 = fold_convert_loc (loc, cmp_type, arg1);
10170
10171 if (unordered_code == UNORDERED_EXPR)
10172 {
10173 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
10174 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
10175 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
10176 }
10177
10178 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
10179 : ordered_code;
10180 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
10181 fold_build2_loc (loc, code, type, arg0, arg1));
10182 }
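/* For example, isgreater (x, y) with NaNs honored is lowered to
   !(x UNLE y); UNLE is true when x <= y or either operand is NaN, so
   its negation is true exactly when both are ordered and x > y. */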
10183
10184 /* Fold a call to built-in function FNDECL with 0 arguments.
10185 IGNORE is true if the result of the function call is ignored. This
10186 function returns NULL_TREE if no simplification was possible. */
10187
10188 static tree
10189 fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
10190 {
10191 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10192 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10193 switch (fcode)
10194 {
10195 CASE_FLT_FN (BUILT_IN_INF):
10196 case BUILT_IN_INFD32:
10197 case BUILT_IN_INFD64:
10198 case BUILT_IN_INFD128:
10199 return fold_builtin_inf (loc, type, true);
10200
10201 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
10202 return fold_builtin_inf (loc, type, false);
10203
10204 case BUILT_IN_CLASSIFY_TYPE:
10205 return fold_builtin_classify_type (NULL_TREE);
10206
10207 case BUILT_IN_UNREACHABLE:
10208 if (flag_sanitize & SANITIZE_UNREACHABLE
10209 && (current_function_decl == NULL
10210 || !lookup_attribute ("no_sanitize_undefined",
10211 DECL_ATTRIBUTES (current_function_decl))))
10212 return ubsan_instrument_unreachable (loc);
10213 break;
10214
10215 default:
10216 break;
10217 }
10218 return NULL_TREE;
10219 }
10220
10221 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
10222 IGNORE is true if the result of the function call is ignored. This
10223 function returns NULL_TREE if no simplification was possible. */
10224
10225 static tree
10226 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
10227 {
10228 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10229 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10230 switch (fcode)
10231 {
10232 case BUILT_IN_CONSTANT_P:
10233 {
10234 tree val = fold_builtin_constant_p (arg0);
10235
10236 /* Gimplification will pull the CALL_EXPR for the builtin out of
10237 an if condition. When not optimizing, we'll not CSE it back.
10238 To avoid regressions in the form of link errors, return false now. */
10239 if (!val && !optimize)
10240 val = integer_zero_node;
10241
10242 return val;
10243 }
10244
10245 case BUILT_IN_CLASSIFY_TYPE:
10246 return fold_builtin_classify_type (arg0);
10247
10248 case BUILT_IN_STRLEN:
10249 return fold_builtin_strlen (loc, type, arg0);
10250
10251 CASE_FLT_FN (BUILT_IN_FABS):
10252 case BUILT_IN_FABSD32:
10253 case BUILT_IN_FABSD64:
10254 case BUILT_IN_FABSD128:
10255 return fold_builtin_fabs (loc, arg0, type);
10256
10257 case BUILT_IN_ABS:
10258 case BUILT_IN_LABS:
10259 case BUILT_IN_LLABS:
10260 case BUILT_IN_IMAXABS:
10261 return fold_builtin_abs (loc, arg0, type);
10262
10263 CASE_FLT_FN (BUILT_IN_CONJ):
10264 if (validate_arg (arg0, COMPLEX_TYPE)
10265 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10266 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
10267 break;
10268
10269 CASE_FLT_FN (BUILT_IN_CREAL):
10270 if (validate_arg (arg0, COMPLEX_TYPE)
10271 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10272 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
10273 break;
10274
10275 CASE_FLT_FN (BUILT_IN_CIMAG):
10276 if (validate_arg (arg0, COMPLEX_TYPE)
10277 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10278 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
10279 break;
10280
10281 CASE_FLT_FN (BUILT_IN_CCOS):
10282 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ false);
10283
10284 CASE_FLT_FN (BUILT_IN_CCOSH):
10285 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ true);
10286
10287 CASE_FLT_FN (BUILT_IN_CPROJ):
10288 return fold_builtin_cproj (loc, arg0, type);
10289
10290 CASE_FLT_FN (BUILT_IN_CSIN):
10291 if (validate_arg (arg0, COMPLEX_TYPE)
10292 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10293 return do_mpc_arg1 (arg0, type, mpc_sin);
10294 break;
10295
10296 CASE_FLT_FN (BUILT_IN_CSINH):
10297 if (validate_arg (arg0, COMPLEX_TYPE)
10298 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10299 return do_mpc_arg1 (arg0, type, mpc_sinh);
10300 break;
10301
10302 CASE_FLT_FN (BUILT_IN_CTAN):
10303 if (validate_arg (arg0, COMPLEX_TYPE)
10304 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10305 return do_mpc_arg1 (arg0, type, mpc_tan);
10306 break;
10307
10308 CASE_FLT_FN (BUILT_IN_CTANH):
10309 if (validate_arg (arg0, COMPLEX_TYPE)
10310 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10311 return do_mpc_arg1 (arg0, type, mpc_tanh);
10312 break;
10313
10314 CASE_FLT_FN (BUILT_IN_CLOG):
10315 if (validate_arg (arg0, COMPLEX_TYPE)
10316 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10317 return do_mpc_arg1 (arg0, type, mpc_log);
10318 break;
10319
10320 CASE_FLT_FN (BUILT_IN_CSQRT):
10321 if (validate_arg (arg0, COMPLEX_TYPE)
10322 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10323 return do_mpc_arg1 (arg0, type, mpc_sqrt);
10324 break;
10325
10326 CASE_FLT_FN (BUILT_IN_CASIN):
10327 if (validate_arg (arg0, COMPLEX_TYPE)
10328 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10329 return do_mpc_arg1 (arg0, type, mpc_asin);
10330 break;
10331
10332 CASE_FLT_FN (BUILT_IN_CACOS):
10333 if (validate_arg (arg0, COMPLEX_TYPE)
10334 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10335 return do_mpc_arg1 (arg0, type, mpc_acos);
10336 break;
10337
10338 CASE_FLT_FN (BUILT_IN_CATAN):
10339 if (validate_arg (arg0, COMPLEX_TYPE)
10340 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10341 return do_mpc_arg1 (arg0, type, mpc_atan);
10342 break;
10343
10344 CASE_FLT_FN (BUILT_IN_CASINH):
10345 if (validate_arg (arg0, COMPLEX_TYPE)
10346 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10347 return do_mpc_arg1 (arg0, type, mpc_asinh);
10348 break;
10349
10350 CASE_FLT_FN (BUILT_IN_CACOSH):
10351 if (validate_arg (arg0, COMPLEX_TYPE)
10352 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10353 return do_mpc_arg1 (arg0, type, mpc_acosh);
10354 break;
10355
10356 CASE_FLT_FN (BUILT_IN_CATANH):
10357 if (validate_arg (arg0, COMPLEX_TYPE)
10358 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10359 return do_mpc_arg1 (arg0, type, mpc_atanh);
10360 break;
10361
10362 CASE_FLT_FN (BUILT_IN_CABS):
10363 return fold_builtin_cabs (loc, arg0, type, fndecl);
10364
10365 CASE_FLT_FN (BUILT_IN_CARG):
10366 return fold_builtin_carg (loc, arg0, type);
10367
10368 CASE_FLT_FN (BUILT_IN_SQRT):
10369 return fold_builtin_sqrt (loc, arg0, type);
10370
10371 CASE_FLT_FN (BUILT_IN_CBRT):
10372 return fold_builtin_cbrt (loc, arg0, type);
10373
10374 CASE_FLT_FN (BUILT_IN_ASIN):
10375 if (validate_arg (arg0, REAL_TYPE))
10376 return do_mpfr_arg1 (arg0, type, mpfr_asin,
10377 &dconstm1, &dconst1, true);
10378 break;
10379
10380 CASE_FLT_FN (BUILT_IN_ACOS):
10381 if (validate_arg (arg0, REAL_TYPE))
10382 return do_mpfr_arg1 (arg0, type, mpfr_acos,
10383 &dconstm1, &dconst1, true);
10384 break;
10385
10386 CASE_FLT_FN (BUILT_IN_ATAN):
10387 if (validate_arg (arg0, REAL_TYPE))
10388 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
10389 break;
10390
10391 CASE_FLT_FN (BUILT_IN_ASINH):
10392 if (validate_arg (arg0, REAL_TYPE))
10393 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10394 break;
10395
10396 CASE_FLT_FN (BUILT_IN_ACOSH):
10397 if (validate_arg (arg0, REAL_TYPE))
10398 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10399 &dconst1, NULL, true);
10400 break;
10401
10402 CASE_FLT_FN (BUILT_IN_ATANH):
10403 if (validate_arg (arg0, REAL_TYPE))
10404 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10405 &dconstm1, &dconst1, false);
10406 break;
10407
10408 CASE_FLT_FN (BUILT_IN_SIN):
10409 if (validate_arg (arg0, REAL_TYPE))
10410 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10411 break;
10412
10413 CASE_FLT_FN (BUILT_IN_COS):
10414 return fold_builtin_cos (loc, arg0, type, fndecl);
10415
10416 CASE_FLT_FN (BUILT_IN_TAN):
10417 return fold_builtin_tan (arg0, type);
10418
10419 CASE_FLT_FN (BUILT_IN_CEXP):
10420 return fold_builtin_cexp (loc, arg0, type);
10421
10422 CASE_FLT_FN (BUILT_IN_CEXPI):
10423 if (validate_arg (arg0, REAL_TYPE))
10424 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10425 break;
10426
10427 CASE_FLT_FN (BUILT_IN_SINH):
10428 if (validate_arg (arg0, REAL_TYPE))
10429 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10430 break;
10431
10432 CASE_FLT_FN (BUILT_IN_COSH):
10433 return fold_builtin_cosh (loc, arg0, type, fndecl);
10434
10435 CASE_FLT_FN (BUILT_IN_TANH):
10436 if (validate_arg (arg0, REAL_TYPE))
10437 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10438 break;
10439
10440 CASE_FLT_FN (BUILT_IN_ERF):
10441 if (validate_arg (arg0, REAL_TYPE))
10442 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10443 break;
10444
10445 CASE_FLT_FN (BUILT_IN_ERFC):
10446 if (validate_arg (arg0, REAL_TYPE))
10447 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10448 break;
10449
10450 CASE_FLT_FN (BUILT_IN_TGAMMA):
10451 if (validate_arg (arg0, REAL_TYPE))
10452 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10453 break;
10454
10455 CASE_FLT_FN (BUILT_IN_EXP):
10456 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
10457
10458 CASE_FLT_FN (BUILT_IN_EXP2):
10459 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
10460
10461 CASE_FLT_FN (BUILT_IN_EXP10):
10462 CASE_FLT_FN (BUILT_IN_POW10):
10463 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
10464
10465 CASE_FLT_FN (BUILT_IN_EXPM1):
10466 if (validate_arg (arg0, REAL_TYPE))
10467 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10468 break;
10469
10470 CASE_FLT_FN (BUILT_IN_LOG):
10471 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
10472
10473 CASE_FLT_FN (BUILT_IN_LOG2):
10474 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
10475
10476 CASE_FLT_FN (BUILT_IN_LOG10):
10477 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
10478
10479 CASE_FLT_FN (BUILT_IN_LOG1P):
10480 if (validate_arg (arg0, REAL_TYPE))
10481 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10482 &dconstm1, NULL, false);
10483 break;
10484
10485 CASE_FLT_FN (BUILT_IN_J0):
10486 if (validate_arg (arg0, REAL_TYPE))
10487 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10488 NULL, NULL, 0);
10489 break;
10490
10491 CASE_FLT_FN (BUILT_IN_J1):
10492 if (validate_arg (arg0, REAL_TYPE))
10493 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10494 NULL, NULL, 0);
10495 break;
10496
10497 CASE_FLT_FN (BUILT_IN_Y0):
10498 if (validate_arg (arg0, REAL_TYPE))
10499 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10500 &dconst0, NULL, false);
10501 break;
10502
10503 CASE_FLT_FN (BUILT_IN_Y1):
10504 if (validate_arg (arg0, REAL_TYPE))
10505 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10506 &dconst0, NULL, false);
10507 break;
10508
10509 CASE_FLT_FN (BUILT_IN_NAN):
10510 case BUILT_IN_NAND32:
10511 case BUILT_IN_NAND64:
10512 case BUILT_IN_NAND128:
10513 return fold_builtin_nan (arg0, type, true);
10514
10515 CASE_FLT_FN (BUILT_IN_NANS):
10516 return fold_builtin_nan (arg0, type, false);
10517
10518 CASE_FLT_FN (BUILT_IN_FLOOR):
10519 return fold_builtin_floor (loc, fndecl, arg0);
10520
10521 CASE_FLT_FN (BUILT_IN_CEIL):
10522 return fold_builtin_ceil (loc, fndecl, arg0);
10523
10524 CASE_FLT_FN (BUILT_IN_TRUNC):
10525 return fold_builtin_trunc (loc, fndecl, arg0);
10526
10527 CASE_FLT_FN (BUILT_IN_ROUND):
10528 return fold_builtin_round (loc, fndecl, arg0);
10529
10530 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10531 CASE_FLT_FN (BUILT_IN_RINT):
10532 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
10533
10534 CASE_FLT_FN (BUILT_IN_ICEIL):
10535 CASE_FLT_FN (BUILT_IN_LCEIL):
10536 CASE_FLT_FN (BUILT_IN_LLCEIL):
10537 CASE_FLT_FN (BUILT_IN_LFLOOR):
10538 CASE_FLT_FN (BUILT_IN_IFLOOR):
10539 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10540 CASE_FLT_FN (BUILT_IN_IROUND):
10541 CASE_FLT_FN (BUILT_IN_LROUND):
10542 CASE_FLT_FN (BUILT_IN_LLROUND):
10543 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
10544
10545 CASE_FLT_FN (BUILT_IN_IRINT):
10546 CASE_FLT_FN (BUILT_IN_LRINT):
10547 CASE_FLT_FN (BUILT_IN_LLRINT):
10548 return fold_fixed_mathfn (loc, fndecl, arg0);
10549
10550 case BUILT_IN_BSWAP16:
10551 case BUILT_IN_BSWAP32:
10552 case BUILT_IN_BSWAP64:
10553 return fold_builtin_bswap (fndecl, arg0);
10554
10555 CASE_INT_FN (BUILT_IN_FFS):
10556 CASE_INT_FN (BUILT_IN_CLZ):
10557 CASE_INT_FN (BUILT_IN_CTZ):
10558 CASE_INT_FN (BUILT_IN_CLRSB):
10559 CASE_INT_FN (BUILT_IN_POPCOUNT):
10560 CASE_INT_FN (BUILT_IN_PARITY):
10561 return fold_builtin_bitop (fndecl, arg0);
10562
10563 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10564 return fold_builtin_signbit (loc, arg0, type);
10565
10566 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10567 return fold_builtin_significand (loc, arg0, type);
10568
10569 CASE_FLT_FN (BUILT_IN_ILOGB):
10570 CASE_FLT_FN (BUILT_IN_LOGB):
10571 return fold_builtin_logb (loc, arg0, type);
10572
10573 case BUILT_IN_ISASCII:
10574 return fold_builtin_isascii (loc, arg0);
10575
10576 case BUILT_IN_TOASCII:
10577 return fold_builtin_toascii (loc, arg0);
10578
10579 case BUILT_IN_ISDIGIT:
10580 return fold_builtin_isdigit (loc, arg0);
10581
10582 CASE_FLT_FN (BUILT_IN_FINITE):
10583 case BUILT_IN_FINITED32:
10584 case BUILT_IN_FINITED64:
10585 case BUILT_IN_FINITED128:
10586 case BUILT_IN_ISFINITE:
10587 {
10588 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10589 if (ret)
10590 return ret;
10591 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10592 }
10593
10594 CASE_FLT_FN (BUILT_IN_ISINF):
10595 case BUILT_IN_ISINFD32:
10596 case BUILT_IN_ISINFD64:
10597 case BUILT_IN_ISINFD128:
10598 {
10599 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10600 if (ret)
10601 return ret;
10602 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10603 }
10604
10605 case BUILT_IN_ISNORMAL:
10606 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10607
10608 case BUILT_IN_ISINF_SIGN:
10609 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10610
10611 CASE_FLT_FN (BUILT_IN_ISNAN):
10612 case BUILT_IN_ISNAND32:
10613 case BUILT_IN_ISNAND64:
10614 case BUILT_IN_ISNAND128:
10615 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10616
10617 case BUILT_IN_PRINTF:
10618 case BUILT_IN_PRINTF_UNLOCKED:
10619 case BUILT_IN_VPRINTF:
10620 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
10621
10622 case BUILT_IN_FREE:
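/* free of a null pointer is defined to be a no-op, so a call with a
   literal zero argument can simply be replaced by an empty statement.  */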
10623 if (integer_zerop (arg0))
10624 return build_empty_stmt (loc);
10625 break;
10626
10627 default:
10628 break;
10629 }
10630
10631 return NULL_TREE;
10632
10633 }
10634
10635 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10636 IGNORE is true if the result of the function call is ignored. This
10637 function returns NULL_TREE if no simplification was possible. */
10638
10639 static tree
10640 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
10641 {
10642 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10643 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10644
10645 switch (fcode)
10646 {
10647 CASE_FLT_FN (BUILT_IN_JN):
10648 if (validate_arg (arg0, INTEGER_TYPE)
10649 && validate_arg (arg1, REAL_TYPE))
10650 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10651 break;
10652
10653 CASE_FLT_FN (BUILT_IN_YN):
10654 if (validate_arg (arg0, INTEGER_TYPE)
10655 && validate_arg (arg1, REAL_TYPE))
10656 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10657 &dconst0, false);
10658 break;
10659
10660 CASE_FLT_FN (BUILT_IN_DREM):
10661 CASE_FLT_FN (BUILT_IN_REMAINDER):
10662 if (validate_arg (arg0, REAL_TYPE)
10663 && validate_arg (arg1, REAL_TYPE))
10664 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10665 break;
10666
10667 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10668 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10669 if (validate_arg (arg0, REAL_TYPE)
10670 && validate_arg (arg1, POINTER_TYPE))
10671 return do_mpfr_lgamma_r (arg0, arg1, type);
10672 break;
10673
10674 CASE_FLT_FN (BUILT_IN_ATAN2):
10675 if (validate_arg (arg0, REAL_TYPE)
10676 && validate_arg (arg1, REAL_TYPE))
10677 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10678 break;
10679
10680 CASE_FLT_FN (BUILT_IN_FDIM):
10681 if (validate_arg (arg0, REAL_TYPE)
10682 && validate_arg (arg1, REAL_TYPE))
10683 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10684 break;
10685
10686 CASE_FLT_FN (BUILT_IN_HYPOT):
10687 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10688
10689 CASE_FLT_FN (BUILT_IN_CPOW):
10690 if (validate_arg (arg0, COMPLEX_TYPE)
10691 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10692 && validate_arg (arg1, COMPLEX_TYPE)
10693 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10694 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10695 break;
10696
10697 CASE_FLT_FN (BUILT_IN_LDEXP):
10698 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10699 CASE_FLT_FN (BUILT_IN_SCALBN):
10700 CASE_FLT_FN (BUILT_IN_SCALBLN):
10701 return fold_builtin_load_exponent (loc, arg0, arg1,
10702 type, /*ldexp=*/false);
10703
10704 CASE_FLT_FN (BUILT_IN_FREXP):
10705 return fold_builtin_frexp (loc, arg0, arg1, type);
10706
10707 CASE_FLT_FN (BUILT_IN_MODF):
10708 return fold_builtin_modf (loc, arg0, arg1, type);
10709
10710 case BUILT_IN_BZERO:
10711 return fold_builtin_bzero (loc, arg0, arg1, ignore);
10712
10713 case BUILT_IN_FPUTS:
10714 return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);
10715
10716 case BUILT_IN_FPUTS_UNLOCKED:
10717 return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);
10718
10719 case BUILT_IN_STRSTR:
10720 return fold_builtin_strstr (loc, arg0, arg1, type);
10721
10722 case BUILT_IN_STRCAT:
10723 return fold_builtin_strcat (loc, arg0, arg1, NULL_TREE);
10724
10725 case BUILT_IN_STRSPN:
10726 return fold_builtin_strspn (loc, arg0, arg1);
10727
10728 case BUILT_IN_STRCSPN:
10729 return fold_builtin_strcspn (loc, arg0, arg1);
10730
10731 case BUILT_IN_STRCHR:
10732 case BUILT_IN_INDEX:
10733 return fold_builtin_strchr (loc, arg0, arg1, type);
10734
10735 case BUILT_IN_STRRCHR:
10736 case BUILT_IN_RINDEX:
10737 return fold_builtin_strrchr (loc, arg0, arg1, type);
10738
10739 case BUILT_IN_STRCPY:
10740 return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);
10741
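/* stpcpy returns a pointer to the terminating NUL of the copy rather
   than to its start, so when that return value is ignored the call is
   equivalent to strcpy and is rewritten as one below.  */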
10742 case BUILT_IN_STPCPY:
10743 if (ignore)
10744 {
10745 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
10746 if (!fn)
10747 break;
10748
10749 return build_call_expr_loc (loc, fn, 2, arg0, arg1);
10750 }
10751 else
10752 return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
10753 break;
10754
10755 case BUILT_IN_STRCMP:
10756 return fold_builtin_strcmp (loc, arg0, arg1);
10757
10758 case BUILT_IN_STRPBRK:
10759 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10760
10761 case BUILT_IN_EXPECT:
10762 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
10763
10764 CASE_FLT_FN (BUILT_IN_POW):
10765 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10766
10767 CASE_FLT_FN (BUILT_IN_POWI):
10768 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10769
10770 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10771 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10772
10773 CASE_FLT_FN (BUILT_IN_FMIN):
10774 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10775
10776 CASE_FLT_FN (BUILT_IN_FMAX):
10777 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10778
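/* The C99 comparison macros below fold to the logical negation of the
   complementary comparison, e.g. isgreater (x, y) becomes !(x <= y),
   using the unordered variant (here UNLE_EXPR) when NaNs must be
   honored, so that no floating-point exception is raised on unordered
   operands.  */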
10779 case BUILT_IN_ISGREATER:
10780 return fold_builtin_unordered_cmp (loc, fndecl,
10781 arg0, arg1, UNLE_EXPR, LE_EXPR);
10782 case BUILT_IN_ISGREATEREQUAL:
10783 return fold_builtin_unordered_cmp (loc, fndecl,
10784 arg0, arg1, UNLT_EXPR, LT_EXPR);
10785 case BUILT_IN_ISLESS:
10786 return fold_builtin_unordered_cmp (loc, fndecl,
10787 arg0, arg1, UNGE_EXPR, GE_EXPR);
10788 case BUILT_IN_ISLESSEQUAL:
10789 return fold_builtin_unordered_cmp (loc, fndecl,
10790 arg0, arg1, UNGT_EXPR, GT_EXPR);
10791 case BUILT_IN_ISLESSGREATER:
10792 return fold_builtin_unordered_cmp (loc, fndecl,
10793 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10794 case BUILT_IN_ISUNORDERED:
10795 return fold_builtin_unordered_cmp (loc, fndecl,
10796 arg0, arg1, UNORDERED_EXPR,
10797 NOP_EXPR);
10798
10799 /* We do the folding for va_start in the expander. */
10800 case BUILT_IN_VA_START:
10801 break;
10802
10803 case BUILT_IN_SPRINTF:
10804 return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);
10805
10806 case BUILT_IN_OBJECT_SIZE:
10807 return fold_builtin_object_size (arg0, arg1);
10808
10809 case BUILT_IN_PRINTF:
10810 case BUILT_IN_PRINTF_UNLOCKED:
10811 case BUILT_IN_VPRINTF:
10812 return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);
10813
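/* For the checking variants the first argument is the compile-time
   flag rather than the format string, so ARG1 is passed as the format
   below; punt if the flag is not a side-effect-free integer.  */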
10814 case BUILT_IN_PRINTF_CHK:
10815 case BUILT_IN_VPRINTF_CHK:
10816 if (!validate_arg (arg0, INTEGER_TYPE)
10817 || TREE_SIDE_EFFECTS (arg0))
10818 return NULL_TREE;
10819 else
10820 return fold_builtin_printf (loc, fndecl,
10821 arg1, NULL_TREE, ignore, fcode);
10822 break;
10823
10824 case BUILT_IN_FPRINTF:
10825 case BUILT_IN_FPRINTF_UNLOCKED:
10826 case BUILT_IN_VFPRINTF:
10827 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
10828 ignore, fcode);
10829
10830 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10831 return fold_builtin_atomic_always_lock_free (arg0, arg1);
10832
10833 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10834 return fold_builtin_atomic_is_lock_free (arg0, arg1);
10835
10836 default:
10837 break;
10838 }
10839 return NULL_TREE;
10840 }
10841
10842 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10843 and ARG2. IGNORE is true if the result of the function call is ignored.
10844 This function returns NULL_TREE if no simplification was possible. */
10845
10846 static tree
10847 fold_builtin_3 (location_t loc, tree fndecl,
10848 tree arg0, tree arg1, tree arg2, bool ignore)
10849 {
10850 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10851 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10852 switch (fcode)
10853 {
10854
10855 CASE_FLT_FN (BUILT_IN_SINCOS):
10856 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10857
10858 CASE_FLT_FN (BUILT_IN_FMA):
10859 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
10860
10861
10862 CASE_FLT_FN (BUILT_IN_REMQUO):
10863 if (validate_arg (arg0, REAL_TYPE)
10864 && validate_arg (arg1, REAL_TYPE)
10865 && validate_arg (arg2, POINTER_TYPE))
10866 return do_mpfr_remquo (arg0, arg1, arg2);
10867 break;
10868
10869 case BUILT_IN_MEMSET:
10870 return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);
10871
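/* bcopy (src, dest, n) takes its pointer arguments in the opposite
   order from memmove (dest, src, n) and must equally tolerate
   overlapping objects, hence the swapped operands and endp == 3
   (memmove semantics) below.  */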
10872 case BUILT_IN_BCOPY:
10873 return fold_builtin_memory_op (loc, arg1, arg0, arg2,
10874 void_type_node, true, /*endp=*/3);
10875
10876 case BUILT_IN_MEMCPY:
10877 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10878 type, ignore, /*endp=*/0);
10879
10880 case BUILT_IN_MEMPCPY:
10881 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10882 type, ignore, /*endp=*/1);
10883
10884 case BUILT_IN_MEMMOVE:
10885 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10886 type, ignore, /*endp=*/3);
10887
10888 case BUILT_IN_STRNCAT:
10889 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10890
10891 case BUILT_IN_STRNCPY:
10892 return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);
10893
10894 case BUILT_IN_STRNCMP:
10895 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10896
10897 case BUILT_IN_MEMCHR:
10898 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10899
10900 case BUILT_IN_BCMP:
10901 case BUILT_IN_MEMCMP:
10902 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
10903
10904 case BUILT_IN_SPRINTF:
10905 return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);
10906
10907 case BUILT_IN_SNPRINTF:
10908 return fold_builtin_snprintf (loc, arg0, arg1, arg2, NULL_TREE, ignore);
10909
10910 case BUILT_IN_STRCPY_CHK:
10911 case BUILT_IN_STPCPY_CHK:
10912 return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
10913 ignore, fcode);
10914
10915 case BUILT_IN_STRCAT_CHK:
10916 return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
10917
10918 case BUILT_IN_PRINTF_CHK:
10919 case BUILT_IN_VPRINTF_CHK:
10920 if (!validate_arg (arg0, INTEGER_TYPE)
10921 || TREE_SIDE_EFFECTS (arg0))
10922 return NULL_TREE;
10923 else
10924 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
10925 break;
10926
10927 case BUILT_IN_FPRINTF:
10928 case BUILT_IN_FPRINTF_UNLOCKED:
10929 case BUILT_IN_VFPRINTF:
10930 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
10931 ignore, fcode);
10932
10933 case BUILT_IN_FPRINTF_CHK:
10934 case BUILT_IN_VFPRINTF_CHK:
10935 if (!validate_arg (arg1, INTEGER_TYPE)
10936 || TREE_SIDE_EFFECTS (arg1))
10937 return NULL_TREE;
10938 else
10939 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
10940 ignore, fcode);
10941
10942 case BUILT_IN_EXPECT:
10943 return fold_builtin_expect (loc, arg0, arg1, arg2);
10944
10945 default:
10946 break;
10947 }
10948 return NULL_TREE;
10949 }
10950
10951 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10952 ARG2, and ARG3. IGNORE is true if the result of the function call is
10953 ignored. This function returns NULL_TREE if no simplification was
10954 possible. */
10955
10956 static tree
10957 fold_builtin_4 (location_t loc, tree fndecl,
10958 tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
10959 {
10960 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10961
10962 switch (fcode)
10963 {
10964 case BUILT_IN_MEMCPY_CHK:
10965 case BUILT_IN_MEMPCPY_CHK:
10966 case BUILT_IN_MEMMOVE_CHK:
10967 case BUILT_IN_MEMSET_CHK:
10968 return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
10969 NULL_TREE, ignore,
10970 DECL_FUNCTION_CODE (fndecl));
10971
10972 case BUILT_IN_STRNCPY_CHK:
10973 case BUILT_IN_STPNCPY_CHK:
10974 return fold_builtin_stxncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE,
10975 ignore, fcode);
10976
10977 case BUILT_IN_STRNCAT_CHK:
10978 return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);
10979
10980 case BUILT_IN_SNPRINTF:
10981 return fold_builtin_snprintf (loc, arg0, arg1, arg2, arg3, ignore);
10982
10983 case BUILT_IN_FPRINTF_CHK:
10984 case BUILT_IN_VFPRINTF_CHK:
10985 if (!validate_arg (arg1, INTEGER_TYPE)
10986 || TREE_SIDE_EFFECTS (arg1))
10987 return NULL_TREE;
10988 else
10989 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
10990 ignore, fcode);
10991 break;
10992
10993 default:
10994 break;
10995 }
10996 return NULL_TREE;
10997 }
10998
10999 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
11000 arguments, where NARGS <= 4. IGNORE is true if the result of the
11001 function call is ignored. This function returns NULL_TREE if no
11002 simplification was possible. Note that this only folds builtins with
11003 fixed argument patterns. Foldings that do varargs-to-varargs
11004 transformations, or that match calls with more than 4 arguments,
11005 need to be handled with fold_builtin_varargs instead. */
11006
11007 #define MAX_ARGS_TO_FOLD_BUILTIN 4
11008
11009 static tree
11010 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
11011 {
11012 tree ret = NULL_TREE;
11013
11014 switch (nargs)
11015 {
11016 case 0:
11017 ret = fold_builtin_0 (loc, fndecl, ignore);
11018 break;
11019 case 1:
11020 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
11021 break;
11022 case 2:
11023 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
11024 break;
11025 case 3:
11026 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
11027 break;
11028 case 4:
11029 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
11030 ignore);
11031 break;
11032 default:
11033 break;
11034 }
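/* Wrap any folded result in a NOP_EXPR with TREE_NO_WARNING set, so
   that replacing the original call does not later trigger spurious
   "statement with no effect" style warnings.  */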
11035 if (ret)
11036 {
11037 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11038 SET_EXPR_LOCATION (ret, loc);
11039 TREE_NO_WARNING (ret) = 1;
11040 return ret;
11041 }
11042 return NULL_TREE;
11043 }
11044
11045 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
11046 list ARGS along with N new arguments in NEWARGS. SKIP is the number
11047 of arguments in ARGS to be omitted. OLDNARGS is the number of
11048 elements in ARGS. */
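/* For example, with OLDNARGS == 3, SKIP == 1 and two new arguments
   N1 and N2, the rewritten call is FNDECL (N1, N2, ARGS[1], ARGS[2]):
   the new arguments come first, followed by the unskipped tail of
   ARGS.  */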
11049
11050 static tree
11051 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
11052 int skip, tree fndecl, int n, va_list newargs)
11053 {
11054 int nargs = oldnargs - skip + n;
11055 tree *buffer;
11056
11057 if (n > 0)
11058 {
11059 int i, j;
11060
11061 buffer = XALLOCAVEC (tree, nargs);
11062 for (i = 0; i < n; i++)
11063 buffer[i] = va_arg (newargs, tree);
11064 for (j = skip; j < oldnargs; j++, i++)
11065 buffer[i] = args[j];
11066 }
11067 else
11068 buffer = args + skip;
11069
11070 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
11071 }
11072
11073 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
11074 list ARGS along with N new arguments specified as the "..."
11075 parameters. SKIP is the number of arguments in ARGS to be omitted.
11076 OLDNARGS is the number of elements in ARGS. */
11077
11078 static tree
11079 rewrite_call_expr_array (location_t loc, int oldnargs, tree *args,
11080 int skip, tree fndecl, int n, ...)
11081 {
11082 va_list ap;
11083 tree t;
11084
11085 va_start (ap, n);
11086 t = rewrite_call_expr_valist (loc, oldnargs, args, skip, fndecl, n, ap);
11087 va_end (ap);
11088
11089 return t;
11090 }
11091
11092 /* Return true if FNDECL shouldn't be folded right now.
11093 If a built-in function has an inline attribute always_inline
11094 wrapper, defer folding it after always_inline functions have
11095 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
11096 might not be performed. */
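/* E.g. with -D_FORTIFY_SOURCE a C library such as glibc may define
   memcpy as an always_inline wrapper that calls
   __builtin___memcpy_chk; folding the builtin before the wrapper has
   been inlined would bypass the object-size check the wrapper exists
   to perform.  */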
11097
11098 bool
11099 avoid_folding_inline_builtin (tree fndecl)
11100 {
11101 return (DECL_DECLARED_INLINE_P (fndecl)
11102 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
11103 && cfun
11104 && !cfun->always_inline_functions_inlined
11105 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
11106 }
11107
11108 /* A wrapper function for builtin folding that prevents warnings for
11109 "statement without effect" and the like, caused by removing the
11110 call node earlier than the warning is generated. */
11111
11112 tree
11113 fold_call_expr (location_t loc, tree exp, bool ignore)
11114 {
11115 tree ret = NULL_TREE;
11116 tree fndecl = get_callee_fndecl (exp);
11117 if (fndecl
11118 && TREE_CODE (fndecl) == FUNCTION_DECL
11119 && DECL_BUILT_IN (fndecl)
11120 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
11121 yet. Defer folding until we see all the arguments
11122 (after inlining). */
11123 && !CALL_EXPR_VA_ARG_PACK (exp))
11124 {
11125 int nargs = call_expr_nargs (exp);
11126
11127 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
11128 instead last argument is __builtin_va_arg_pack (). Defer folding
11129 even in that case, until arguments are finalized. */
11130 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
11131 {
11132 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
11133 if (fndecl2
11134 && TREE_CODE (fndecl2) == FUNCTION_DECL
11135 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
11136 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
11137 return NULL_TREE;
11138 }
11139
11140 if (avoid_folding_inline_builtin (fndecl))
11141 return NULL_TREE;
11142
11143 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11144 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
11145 CALL_EXPR_ARGP (exp), ignore);
11146 else
11147 {
11148 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
11149 {
11150 tree *args = CALL_EXPR_ARGP (exp);
11151 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
11152 }
11153 if (!ret)
11154 ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
11155 if (ret)
11156 return ret;
11157 }
11158 }
11159 return NULL_TREE;
11160 }
11161
11162 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
11163 N arguments are passed in the array ARGARRAY. */
11164
11165 tree
11166 fold_builtin_call_array (location_t loc, tree type,
11167 tree fn,
11168 int n,
11169 tree *argarray)
11170 {
11171 tree ret = NULL_TREE;
11172 tree exp;
11173
11174 if (TREE_CODE (fn) == ADDR_EXPR)
11175 {
11176 tree fndecl = TREE_OPERAND (fn, 0);
11177 if (TREE_CODE (fndecl) == FUNCTION_DECL
11178 && DECL_BUILT_IN (fndecl))
11179 {
11180 /* If last argument is __builtin_va_arg_pack (), arguments to this
11181 function are not finalized yet. Defer folding until they are. */
11182 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
11183 {
11184 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
11185 if (fndecl2
11186 && TREE_CODE (fndecl2) == FUNCTION_DECL
11187 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
11188 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
11189 return build_call_array_loc (loc, type, fn, n, argarray);
11190 }
11191 if (avoid_folding_inline_builtin (fndecl))
11192 return build_call_array_loc (loc, type, fn, n, argarray);
11193 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11194 {
11195 ret = targetm.fold_builtin (fndecl, n, argarray, false);
11196 if (ret)
11197 return ret;
11198
11199 return build_call_array_loc (loc, type, fn, n, argarray);
11200 }
11201 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
11202 {
11203 /* First try the transformations that don't require consing up
11204 an exp. */
11205 ret = fold_builtin_n (loc, fndecl, argarray, n, false);
11206 if (ret)
11207 return ret;
11208 }
11209
11210 /* If we got this far, we need to build an exp. */
11211 exp = build_call_array_loc (loc, type, fn, n, argarray);
11212 ret = fold_builtin_varargs (loc, fndecl, exp, false);
11213 return ret ? ret : exp;
11214 }
11215 }
11216
11217 return build_call_array_loc (loc, type, fn, n, argarray);
11218 }
11219
11220 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
11221 along with N new arguments specified as the "..." parameters. SKIP
11222 is the number of arguments in EXP to be omitted. This function is used
11223 to do varargs-to-varargs transformations. */
11224
11225 static tree
11226 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
11227 {
11228 va_list ap;
11229 tree t;
11230
11231 va_start (ap, n);
11232 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
11233 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
11234 va_end (ap);
11235
11236 return t;
11237 }
11238
11239 /* Validate a single argument ARG against a tree code CODE representing
11240 a type. */
11241
11242 static bool
11243 validate_arg (const_tree arg, enum tree_code code)
11244 {
11245 if (!arg)
11246 return false;
11247 else if (code == POINTER_TYPE)
11248 return POINTER_TYPE_P (TREE_TYPE (arg));
11249 else if (code == INTEGER_TYPE)
11250 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
11251 return code == TREE_CODE (TREE_TYPE (arg));
11252 }
11253
11254 /* This function validates the types of a function call argument list
11255 against a specified list of tree_codes. If the last specifier is a 0,
11256 that represents an ellipsis; otherwise the last specifier must be a
11257 VOID_TYPE.
11258
11259 This is the GIMPLE version of validate_arglist. Eventually we want to
11260 completely convert builtins.c to work from GIMPLEs and the tree based
11261 validate_arglist will then be removed. */
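/* For example, validate_gimple_arglist (call, POINTER_TYPE,
   INTEGER_TYPE, VOID_TYPE) accepts exactly a (pointer, integer)
   argument list; writing 0 in place of the trailing VOID_TYPE would
   additionally accept any number of further arguments, as with an
   ellipsis.  */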
11262
11263 bool
11264 validate_gimple_arglist (const_gimple call, ...)
11265 {
11266 enum tree_code code;
11267 bool res = false;
11268 va_list ap;
11269 const_tree arg;
11270 size_t i;
11271
11272 va_start (ap, call);
11273 i = 0;
11274
11275 do
11276 {
11277 code = (enum tree_code) va_arg (ap, int);
11278 switch (code)
11279 {
11280 case 0:
11281 /* This signifies an ellipsis; any further arguments are all ok. */
11282 res = true;
11283 goto end;
11284 case VOID_TYPE:
11285 /* This signifies an endlink, if no arguments remain, return
11286 true, otherwise return false. */
11287 res = (i == gimple_call_num_args (call));
11288 goto end;
11289 default:
11290 /* If no parameters remain or the parameter's code does not
11291 match the specified code, return false. Otherwise continue
11292 checking any remaining arguments. */
11293 arg = gimple_call_arg (call, i++);
11294 if (!validate_arg (arg, code))
11295 goto end;
11296 break;
11297 }
11298 }
11299 while (1);
11300
11301 /* We need gotos here since we can only have one va_end in a
11302 function. */
11303 end: ;
11304 va_end (ap);
11305
11306 return res;
11307 }
11308
11309 /* Default target-specific builtin expander that does nothing. */
11310
11311 rtx
11312 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
11313 rtx target ATTRIBUTE_UNUSED,
11314 rtx subtarget ATTRIBUTE_UNUSED,
11315 enum machine_mode mode ATTRIBUTE_UNUSED,
11316 int ignore ATTRIBUTE_UNUSED)
11317 {
11318 return NULL_RTX;
11319 }
11320
11321 /* Returns true if EXP represents data that would potentially reside
11322 in a readonly section. */
11323
11324 static bool
11325 readonly_data_expr (tree exp)
11326 {
11327 STRIP_NOPS (exp);
11328
11329 if (TREE_CODE (exp) != ADDR_EXPR)
11330 return false;
11331
11332 exp = get_base_address (TREE_OPERAND (exp, 0));
11333 if (!exp)
11334 return false;
11335
11336 /* Make sure we call decl_readonly_section only for trees it
11337 can handle (since it returns true for everything it doesn't
11338 understand). */
11339 if (TREE_CODE (exp) == STRING_CST
11340 || TREE_CODE (exp) == CONSTRUCTOR
11341 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
11342 return decl_readonly_section (exp, 0);
11343 else
11344 return false;
11345 }
11346
11347 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
11348 to the call, and TYPE is its return type.
11349
11350 Return NULL_TREE if no simplification was possible, otherwise return the
11351 simplified form of the call as a tree.
11352
11353 The simplified form may be a constant or other expression which
11354 computes the same value, but in a more efficient manner (including
11355 calls to other builtin functions).
11356
11357 The call may contain arguments which need to be evaluated, but
11358 which are not useful to determine the result of the call. In
11359 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11360 COMPOUND_EXPR will be an argument which must be evaluated.
11361 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11362 COMPOUND_EXPR in the chain will contain the tree for the simplified
11363 form of the builtin function call. */
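/* Illustrative foldings performed below, assuming the needle is a
   string literal:

     strstr ("hello", "ll")  =>  "hello" + 2
     strstr (s, "")          =>  s
     strstr (s, "l")         =>  strchr (s, 'l')  */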
11364
11365 static tree
11366 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
11367 {
11368 if (!validate_arg (s1, POINTER_TYPE)
11369 || !validate_arg (s2, POINTER_TYPE))
11370 return NULL_TREE;
11371 else
11372 {
11373 tree fn;
11374 const char *p1, *p2;
11375
11376 p2 = c_getstr (s2);
11377 if (p2 == NULL)
11378 return NULL_TREE;
11379
11380 p1 = c_getstr (s1);
11381 if (p1 != NULL)
11382 {
11383 const char *r = strstr (p1, p2);
11384 tree tem;
11385
11386 if (r == NULL)
11387 return build_int_cst (TREE_TYPE (s1), 0);
11388
11389 /* Return an offset into the constant string argument. */
11390 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11391 return fold_convert_loc (loc, type, tem);
11392 }
11393
11394 /* The argument is const char *, and the result is char *, so we need
11395 a type conversion here to avoid a warning. */
11396 if (p2[0] == '\0')
11397 return fold_convert_loc (loc, type, s1);
11398
11399 if (p2[1] != '\0')
11400 return NULL_TREE;
11401
11402 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11403 if (!fn)
11404 return NULL_TREE;
11405
11406 /* New argument list transforming strstr(s1, s2) to
11407 strchr(s1, s2[0]). */
11408 return build_call_expr_loc (loc, fn, 2, s1,
11409 build_int_cst (integer_type_node, p2[0]));
11410 }
11411 }
11412
11413 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11414 the call, and TYPE is its return type.
11415
11416 Return NULL_TREE if no simplification was possible, otherwise return the
11417 simplified form of the call as a tree.
11418
11419 The simplified form may be a constant or other expression which
11420 computes the same value, but in a more efficient manner (including
11421 calls to other builtin functions).
11422
11423 The call may contain arguments which need to be evaluated, but
11424 which are not useful to determine the result of the call. In
11425 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11426 COMPOUND_EXPR will be an argument which must be evaluated.
11427 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11428 COMPOUND_EXPR in the chain will contain the tree for the simplified
11429 form of the builtin function call. */
11430
11431 static tree
11432 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
11433 {
11434 if (!validate_arg (s1, POINTER_TYPE)
11435 || !validate_arg (s2, INTEGER_TYPE))
11436 return NULL_TREE;
11437 else
11438 {
11439 const char *p1;
11440
11441 if (TREE_CODE (s2) != INTEGER_CST)
11442 return NULL_TREE;
11443
11444 p1 = c_getstr (s1);
11445 if (p1 != NULL)
11446 {
11447 char c;
11448 const char *r;
11449 tree tem;
11450
11451 if (target_char_cast (s2, &c))
11452 return NULL_TREE;
11453
11454 r = strchr (p1, c);
11455
11456 if (r == NULL)
11457 return build_int_cst (TREE_TYPE (s1), 0);
11458
11459 /* Return an offset into the constant string argument. */
11460 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11461 return fold_convert_loc (loc, type, tem);
11462 }
11463 return NULL_TREE;
11464 }
11465 }
11466
11467 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11468 the call, and TYPE is its return type.
11469
11470 Return NULL_TREE if no simplification was possible, otherwise return the
11471 simplified form of the call as a tree.
11472
11473 The simplified form may be a constant or other expression which
11474 computes the same value, but in a more efficient manner (including
11475 calls to other builtin functions).
11476
11477 The call may contain arguments which need to be evaluated, but
11478 which are not useful to determine the result of the call. In
11479 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11480 COMPOUND_EXPR will be an argument which must be evaluated.
11481 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11482 COMPOUND_EXPR in the chain will contain the tree for the simplified
11483 form of the builtin function call. */
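/* E.g. strrchr ("hello", 'l') folds to "hello" + 3, and
   strrchr (s, '\0') is rewritten as strchr (s, '\0'), since the only
   occurrence of the NUL is the terminator either way.  */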
11484
11485 static tree
11486 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
11487 {
11488 if (!validate_arg (s1, POINTER_TYPE)
11489 || !validate_arg (s2, INTEGER_TYPE))
11490 return NULL_TREE;
11491 else
11492 {
11493 tree fn;
11494 const char *p1;
11495
11496 if (TREE_CODE (s2) != INTEGER_CST)
11497 return NULL_TREE;
11498
11499 p1 = c_getstr (s1);
11500 if (p1 != NULL)
11501 {
11502 char c;
11503 const char *r;
11504 tree tem;
11505
11506 if (target_char_cast (s2, &c))
11507 return NULL_TREE;
11508
11509 r = strrchr (p1, c);
11510
11511 if (r == NULL)
11512 return build_int_cst (TREE_TYPE (s1), 0);
11513
11514 /* Return an offset into the constant string argument. */
11515 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11516 return fold_convert_loc (loc, type, tem);
11517 }
11518
11519 if (! integer_zerop (s2))
11520 return NULL_TREE;
11521
11522 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11523 if (!fn)
11524 return NULL_TREE;
11525
11526 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11527 return build_call_expr_loc (loc, fn, 2, s1, s2);
11528 }
11529 }
11530
11531 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11532 to the call, and TYPE is its return type.
11533
11534 Return NULL_TREE if no simplification was possible, otherwise return the
11535 simplified form of the call as a tree.
11536
11537 The simplified form may be a constant or other expression which
11538 computes the same value, but in a more efficient manner (including
11539 calls to other builtin functions).
11540
11541 The call may contain arguments which need to be evaluated, but
11542 which are not useful to determine the result of the call. In
11543 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11544 COMPOUND_EXPR will be an argument which must be evaluated.
11545 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11546 COMPOUND_EXPR in the chain will contain the tree for the simplified
11547 form of the builtin function call. */
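/* E.g. strpbrk ("hello", "lo") folds to "hello" + 2, strpbrk (s, "")
   folds to NULL while still evaluating S for side effects, and
   strpbrk (s, "a") is rewritten as strchr (s, 'a').  */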
11548
11549 static tree
11550 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
11551 {
11552 if (!validate_arg (s1, POINTER_TYPE)
11553 || !validate_arg (s2, POINTER_TYPE))
11554 return NULL_TREE;
11555 else
11556 {
11557 tree fn;
11558 const char *p1, *p2;
11559
11560 p2 = c_getstr (s2);
11561 if (p2 == NULL)
11562 return NULL_TREE;
11563
11564 p1 = c_getstr (s1);
11565 if (p1 != NULL)
11566 {
11567 const char *r = strpbrk (p1, p2);
11568 tree tem;
11569
11570 if (r == NULL)
11571 return build_int_cst (TREE_TYPE (s1), 0);
11572
11573 /* Return an offset into the constant string argument. */
11574 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11575 return fold_convert_loc (loc, type, tem);
11576 }
11577
11578 if (p2[0] == '\0')
11579 /* strpbrk(x, "") == NULL.
11580 Evaluate and ignore s1 in case it had side-effects. */
11581 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11582
11583 if (p2[1] != '\0')
11584 return NULL_TREE; /* Really call strpbrk. */
11585
11586 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11587 if (!fn)
11588 return NULL_TREE;
11589
11590 /* New argument list transforming strpbrk(s1, s2) to
11591 strchr(s1, s2[0]). */
11592 return build_call_expr_loc (loc, fn, 2, s1,
11593 build_int_cst (integer_type_node, p2[0]));
11594 }
11595 }
11596
11597 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11598 to the call.
11599
11600 Return NULL_TREE if no simplification was possible, otherwise return the
11601 simplified form of the call as a tree.
11602
11603 The simplified form may be a constant or other expression which
11604 computes the same value, but in a more efficient manner (including
11605 calls to other builtin functions).
11606
11607 The call may contain arguments which need to be evaluated, but
11608 which are not useful to determine the result of the call. In
11609 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11610 COMPOUND_EXPR will be an argument which must be evaluated.
11611 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11612 COMPOUND_EXPR in the chain will contain the tree for the simplified
11613 form of the builtin function call. */
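/* Illustrative transformation, applied when optimizing for speed and
   the source length is a compile-time constant:

     strcat (dst, "ab")  =>  memcpy (dst + strlen (dst), "ab", 3), dst

   so the terminating NUL is copied along with the source and DST is
   still the value of the whole expression.  */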
11614
11615 tree
11616 fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src,
11617 tree len)
11618 {
11619 if (!validate_arg (dst, POINTER_TYPE)
11620 || !validate_arg (src, POINTER_TYPE))
11621 return NULL_TREE;
11622 else
11623 {
11624 const char *p = c_getstr (src);
11625
11626 /* If the string length is zero, return the dst parameter. */
11627 if (p && *p == '\0')
11628 return dst;
11629
11630 if (optimize_insn_for_speed_p ())
11631 {
11632 /* See if we can store by pieces into (dst + strlen(dst)). */
11633 tree newdst, call;
11634 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11635 tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
11636
11637 if (!strlen_fn || !memcpy_fn)
11638 return NULL_TREE;
11639
11640 /* If the length of the source string isn't computable don't
11641 split strcat into strlen and memcpy. */
11642 if (! len)
11643 len = c_strlen (src, 1);
11644 if (! len || TREE_SIDE_EFFECTS (len))
11645 return NULL_TREE;
11646
11647 /* Stabilize the argument list. */
11648 dst = builtin_save_expr (dst);
11649
11650 /* Create strlen (dst). */
11651 newdst = build_call_expr_loc (loc, strlen_fn, 1, dst);
11652 /* Create (dst p+ strlen (dst)). */
11653
11654 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
11655 newdst = builtin_save_expr (newdst);
11656
11657 len = fold_convert_loc (loc, size_type_node, len);
11658 len = size_binop_loc (loc, PLUS_EXPR, len,
11659 build_int_cst (size_type_node, 1));
11660
11661 call = build_call_expr_loc (loc, memcpy_fn, 3, newdst, src, len);
11662 return build2 (COMPOUND_EXPR, TREE_TYPE (dst), call, dst);
11663 }
11664 return NULL_TREE;
11665 }
11666 }
11667
11668 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11669 arguments to the call.
11670
11671 Return NULL_TREE if no simplification was possible, otherwise return the
11672 simplified form of the call as a tree.
11673
11674 The simplified form may be a constant or other expression which
11675 computes the same value, but in a more efficient manner (including
11676 calls to other builtin functions).
11677
11678 The call may contain arguments which need to be evaluated, but
11679 which are not useful to determine the result of the call. In
11680 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11681 COMPOUND_EXPR will be an argument which must be evaluated.
11682 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11683 COMPOUND_EXPR in the chain will contain the tree for the simplified
11684 form of the builtin function call. */
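/* E.g. strncat (dst, "ab", 0) folds to DST, and strncat (dst, "ab", 5)
   is rewritten as strcat (dst, "ab") because the bound can never be
   reached.  */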
11685
11686 static tree
11687 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
11688 {
11689 if (!validate_arg (dst, POINTER_TYPE)
11690 || !validate_arg (src, POINTER_TYPE)
11691 || !validate_arg (len, INTEGER_TYPE))
11692 return NULL_TREE;
11693 else
11694 {
11695 const char *p = c_getstr (src);
11696
11697 /* If the requested length is zero, or the src parameter string
11698 length is zero, return the dst parameter. */
11699 if (integer_zerop (len) || (p && *p == '\0'))
11700 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11701
11702 /* If the requested len is greater than or equal to the string
11703 length, call strcat. */
11704 if (TREE_CODE (len) == INTEGER_CST && p
11705 && compare_tree_int (len, strlen (p)) >= 0)
11706 {
11707 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
11708
11709 /* If the replacement _DECL isn't initialized, don't do the
11710 transformation. */
11711 if (!fn)
11712 return NULL_TREE;
11713
11714 return build_call_expr_loc (loc, fn, 2, dst, src);
11715 }
11716 return NULL_TREE;
11717 }
11718 }
11719
11720 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11721 to the call.
11722
11723 Return NULL_TREE if no simplification was possible, otherwise return the
11724 simplified form of the call as a tree.
11725
11726 The simplified form may be a constant or other expression which
11727 computes the same value, but in a more efficient manner (including
11728 calls to other builtin functions).
11729
11730 The call may contain arguments which need to be evaluated, but
11731 which are not useful to determine the result of the call. In
11732 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11733 COMPOUND_EXPR will be an argument which must be evaluated.
11734 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11735 COMPOUND_EXPR in the chain will contain the tree for the simplified
11736 form of the builtin function call. */
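/* E.g. strspn ("abcba", "ab") folds to the constant 2, and a call in
   which either string is known to be empty folds to 0 while still
   evaluating both arguments for side effects.  */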
11737
11738 static tree
11739 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11740 {
11741 if (!validate_arg (s1, POINTER_TYPE)
11742 || !validate_arg (s2, POINTER_TYPE))
11743 return NULL_TREE;
11744 else
11745 {
11746 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11747
11748 /* If both arguments are constants, evaluate at compile-time. */
11749 if (p1 && p2)
11750 {
11751 const size_t r = strspn (p1, p2);
11752 return build_int_cst (size_type_node, r);
11753 }
11754
11755 /* If either argument is "", return NULL_TREE. */
11756 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11757 /* Evaluate and ignore both arguments in case either one has
11758 side-effects. */
11759 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11760 s1, s2);
11761 return NULL_TREE;
11762 }
11763 }
11764
11765 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11766 to the call.
11767
11768 Return NULL_TREE if no simplification was possible, otherwise return the
11769 simplified form of the call as a tree.
11770
11771 The simplified form may be a constant or other expression which
11772 computes the same value, but in a more efficient manner (including
11773 calls to other builtin functions).
11774
11775 The call may contain arguments which need to be evaluated, but
11776 which are not useful to determine the result of the call. In
11777 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11778 COMPOUND_EXPR will be an argument which must be evaluated.
11779 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11780 COMPOUND_EXPR in the chain will contain the tree for the simplified
11781 form of the builtin function call. */
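/* E.g. strcspn ("abc", "c") folds to the constant 2, strcspn ("", s2)
   folds to 0, and strcspn (s1, "") is rewritten as strlen (s1).  */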
11782
11783 static tree
11784 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11785 {
11786 if (!validate_arg (s1, POINTER_TYPE)
11787 || !validate_arg (s2, POINTER_TYPE))
11788 return NULL_TREE;
11789 else
11790 {
11791 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11792
11793 /* If both arguments are constants, evaluate at compile-time. */
11794 if (p1 && p2)
11795 {
11796 const size_t r = strcspn (p1, p2);
11797 return build_int_cst (size_type_node, r);
11798 }
11799
11800 /* If the first argument is "", return NULL_TREE. */
11801 if (p1 && *p1 == '\0')
11802 {
11803 /* Evaluate and ignore argument s2 in case it has
11804 side-effects. */
11805 return omit_one_operand_loc (loc, size_type_node,
11806 size_zero_node, s2);
11807 }
11808
11809 /* If the second argument is "", return __builtin_strlen(s1). */
11810 if (p2 && *p2 == '\0')
11811 {
11812 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11813
11814 /* If the replacement _DECL isn't initialized, don't do the
11815 transformation. */
11816 if (!fn)
11817 return NULL_TREE;
11818
11819 return build_call_expr_loc (loc, fn, 1, s1);
11820 }
11821 return NULL_TREE;
11822 }
11823 }
11824
11825 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11826 to the call. IGNORE is true if the value returned
11827 by the builtin will be ignored. UNLOCKED is true if this is
11828 actually a call to fputs_unlocked. If LEN is non-NULL, it represents
11829 the known length of the string. Return NULL_TREE if no simplification
11830 was possible. */
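/* Illustrative foldings performed below when the result is unused:

     fputs ("", f)     =>  (deleted; F is still evaluated)
     fputs ("x", f)    =>  fputc ('x', f)
     fputs ("abc", f)  =>  fwrite ("abc", 1, 3, f)

   the last one only when not optimizing for size.  */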
11831
11832 tree
11833 fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
11834 bool ignore, bool unlocked, tree len)
11835 {
11836 /* If we're using an unlocked function, assume the other unlocked
11837 functions exist explicitly. */
11838 tree const fn_fputc = (unlocked
11839 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
11840 : builtin_decl_implicit (BUILT_IN_FPUTC));
11841 tree const fn_fwrite = (unlocked
11842 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
11843 : builtin_decl_implicit (BUILT_IN_FWRITE));
11844
11845 /* If the return value is used, don't do the transformation. */
11846 if (!ignore)
11847 return NULL_TREE;
11848
11849 /* Verify the arguments in the original call. */
11850 if (!validate_arg (arg0, POINTER_TYPE)
11851 || !validate_arg (arg1, POINTER_TYPE))
11852 return NULL_TREE;
11853
11854 if (! len)
11855 len = c_strlen (arg0, 0);
11856
11857 /* Get the length of the string passed to fputs. If the length
11858 can't be determined, punt. */
11859 if (!len
11860 || TREE_CODE (len) != INTEGER_CST)
11861 return NULL_TREE;
11862
11863 switch (compare_tree_int (len, 1))
11864 {
11865 case -1: /* length is 0, delete the call entirely. */
11866 return omit_one_operand_loc (loc, integer_type_node,
11867 integer_zero_node, arg1);
11868
11869 case 0: /* length is 1, call fputc. */
11870 {
11871 const char *p = c_getstr (arg0);
11872
11873 if (p != NULL)
11874 {
11875 if (fn_fputc)
11876 return build_call_expr_loc (loc, fn_fputc, 2,
11877 build_int_cst
11878 (integer_type_node, p[0]), arg1);
11879 else
11880 return NULL_TREE;
11881 }
11882 }
11883 /* FALLTHROUGH */
11884 case 1: /* length is greater than 1, call fwrite. */
11885 {
11886 /* If optimizing for size keep fputs. */
11887 if (optimize_function_for_size_p (cfun))
11888 return NULL_TREE;
11889 /* New argument list transforming fputs(string, stream) to
11890 fwrite(string, 1, len, stream). */
11891 if (fn_fwrite)
11892 return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
11893 size_one_node, len, arg1);
11894 else
11895 return NULL_TREE;
11896 }
11897 default:
11898 gcc_unreachable ();
11899 }
11900 return NULL_TREE;
11901 }
11902
11903 /* Fold the next_arg or va_start call EXP. Returns true if an error
11904 was produced, false otherwise. This is done so that we don't output the
11905 error or warning twice or three times. */
11906
11907 bool
11908 fold_builtin_next_arg (tree exp, bool va_start_p)
11909 {
11910 tree fntype = TREE_TYPE (current_function_decl);
11911 int nargs = call_expr_nargs (exp);
11912 tree arg;
11913 /* There is a good chance the current input_location points inside the
11914 definition of the va_start macro (perhaps on the token for
11915 builtin) in a system header, so warnings will not be emitted.
11916 Use the location in real source code. */
11917 source_location current_location =
11918 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
11919 NULL);
11920
11921 if (!stdarg_p (fntype))
11922 {
11923 error ("%<va_start%> used in function with fixed args");
11924 return true;
11925 }
11926
11927 if (va_start_p)
11928 {
11929 if (nargs != 2)
11930 {
11931 error ("wrong number of arguments to function %<va_start%>");
11932 return true;
11933 }
11934 arg = CALL_EXPR_ARG (exp, 1);
11935 }
11936 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11937 when we checked the arguments and if needed issued a warning. */
11938 else
11939 {
11940 if (nargs == 0)
11941 {
11942 /* Evidently an out-of-date version of <stdarg.h>; can't validate
11943 va_start's second argument, but can still work as intended. */
11944 warning_at (current_location,
11945 OPT_Wvarargs,
11946 "%<__builtin_next_arg%> called without an argument");
11947 return true;
11948 }
11949 else if (nargs > 1)
11950 {
11951 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11952 return true;
11953 }
11954 arg = CALL_EXPR_ARG (exp, 0);
11955 }
11956
11957 if (TREE_CODE (arg) == SSA_NAME)
11958 arg = SSA_NAME_VAR (arg);
11959
11960 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11961 or __builtin_next_arg (0) the first time we see it, after checking
11962 the arguments and if needed issuing a warning. */
11963 if (!integer_zerop (arg))
11964 {
11965 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11966
11967 /* Strip off all nops for the sake of the comparison. This
11968 is not quite the same as STRIP_NOPS. It does more.
11969 We must also strip off INDIRECT_EXPR for C++ reference
11970 parameters. */
11971 while (CONVERT_EXPR_P (arg)
11972 || TREE_CODE (arg) == INDIRECT_REF)
11973 arg = TREE_OPERAND (arg, 0);
11974 if (arg != last_parm)
11975 {
11976 /* FIXME: Sometimes with the tree optimizers we can get
11977 something other than the last argument even though the user
11978 used the last argument. We just warn and proceed, so we
11979 may generate wrong code because of
11980 it. */
11981 warning_at (current_location,
11982 OPT_Wvarargs,
11983 "second parameter of %<va_start%> not last named argument");
11984 }
11985
11986 /* Undefined by C99 7.15.1.4p4 (va_start):
11987 "If the parameter parmN is declared with the register storage
11988 class, with a function or array type, or with a type that is
11989 not compatible with the type that results after application of
11990 the default argument promotions, the behavior is undefined."
11991 */
11992 else if (DECL_REGISTER (arg))
11993 {
11994 warning_at (current_location,
11995 OPT_Wvarargs,
11996 "undefined behaviour when second parameter of "
11997 "%<va_start%> is declared with %<register%> storage");
11998 }
11999
12000 /* We want to verify the second parameter just once before the tree
12001 optimizers are run and then avoid keeping it in the tree,
12002 as otherwise we could warn even for correct code like:
12003 void foo (int i, ...)
12004 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
12005 if (va_start_p)
12006 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
12007 else
12008 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
12009 }
12010 return false;
12011 }
12012
12013
12014 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
12015 ORIG may be null if this is a 2-argument call. We don't attempt to
12016 simplify calls with more than 3 arguments.
12017
12018 Return NULL_TREE if no simplification was possible, otherwise return the
12019 simplified form of the call as a tree. If IGNORED is true, it means that
12020 the caller does not use the returned value of the function. */
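/* Illustrative foldings performed below:

     sprintf (str, "abc")    =>  strcpy (str, "abc"), with the
                                 constant 3 as value if it is used
     sprintf (str, "%s", s)  =>  strcpy (str, s), when the result is
                                 unused or strlen (s) is constant  */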
12021
12022 static tree
12023 fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
12024 tree orig, int ignored)
12025 {
12026 tree call, retval;
12027 const char *fmt_str = NULL;
12028
12029 /* Verify the required arguments in the original call. We deal with two
12030 types of sprintf() calls: 'sprintf (str, fmt)' and
12031 'sprintf (dest, "%s", orig)'. */
12032 if (!validate_arg (dest, POINTER_TYPE)
12033 || !validate_arg (fmt, POINTER_TYPE))
12034 return NULL_TREE;
12035 if (orig && !validate_arg (orig, POINTER_TYPE))
12036 return NULL_TREE;
12037
12038 /* Check whether the format is a literal string constant. */
12039 fmt_str = c_getstr (fmt);
12040 if (fmt_str == NULL)
12041 return NULL_TREE;
12042
12043 call = NULL_TREE;
12044 retval = NULL_TREE;
12045
12046 if (!init_target_chars ())
12047 return NULL_TREE;
12048
12049 /* If the format doesn't contain % args or %%, use strcpy. */
12050 if (strchr (fmt_str, target_percent) == NULL)
12051 {
12052 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12053
12054 if (!fn)
12055 return NULL_TREE;
12056
12057 /* Don't optimize sprintf (buf, "abc", ptr++). */
12058 if (orig)
12059 return NULL_TREE;
12060
12061 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
12062 'format' is known to contain no % formats. */
12063 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
12064 if (!ignored)
12065 retval = build_int_cst (integer_type_node, strlen (fmt_str));
12066 }
12067
12068 /* If the format is "%s", use strcpy if the result isn't used. */
12069 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
12070 {
12071 tree fn;
12072 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12073
12074 if (!fn)
12075 return NULL_TREE;
12076
12077 /* Don't crash on sprintf (str1, "%s"). */
12078 if (!orig)
12079 return NULL_TREE;
12080
12081 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
12082 if (!ignored)
12083 {
12084 retval = c_strlen (orig, 1);
12085 if (!retval || TREE_CODE (retval) != INTEGER_CST)
12086 return NULL_TREE;
12087 }
12088 call = build_call_expr_loc (loc, fn, 2, dest, orig);
12089 }
12090
12091 if (call && retval)
12092 {
12093 retval = fold_convert_loc
12094 (loc, TREE_TYPE (TREE_TYPE (builtin_decl_implicit (BUILT_IN_SPRINTF))),
12095 retval);
12096 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
12097 }
12098 else
12099 return call;
12100 }
12101
12102 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
12103 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
12104 attempt to simplify calls with more than 4 arguments.
12105
12106 Return NULL_TREE if no simplification was possible, otherwise return the
12107 simplified form of the call as a tree. If IGNORED is true, it means that
12108 the caller does not use the returned value of the function. */
12109
12110 static tree
12111 fold_builtin_snprintf (location_t loc, tree dest, tree destsize, tree fmt,
12112 tree orig, int ignored)
12113 {
12114 tree call, retval;
12115 const char *fmt_str = NULL;
12116 unsigned HOST_WIDE_INT destlen;
12117
12118 /* Verify the required arguments in the original call. We deal with two
12119 	 types of snprintf() calls: 'snprintf (dest, cst, fmt)' and
12120 'snprintf (dest, cst, "%s", orig)'. */
12121 if (!validate_arg (dest, POINTER_TYPE)
12122 || !validate_arg (destsize, INTEGER_TYPE)
12123 || !validate_arg (fmt, POINTER_TYPE))
12124 return NULL_TREE;
12125 if (orig && !validate_arg (orig, POINTER_TYPE))
12126 return NULL_TREE;
12127
12128 if (!tree_fits_uhwi_p (destsize))
12129 return NULL_TREE;
12130
12131 /* Check whether the format is a literal string constant. */
12132 fmt_str = c_getstr (fmt);
12133 if (fmt_str == NULL)
12134 return NULL_TREE;
12135
12136 call = NULL_TREE;
12137 retval = NULL_TREE;
12138
12139 if (!init_target_chars ())
12140 return NULL_TREE;
12141
12142 destlen = tree_to_uhwi (destsize);
12143
12144 /* If the format doesn't contain % args or %%, use strcpy. */
12145 if (strchr (fmt_str, target_percent) == NULL)
12146 {
12147 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12148 size_t len = strlen (fmt_str);
12149
12150 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
12151 if (orig)
12152 return NULL_TREE;
12153
12154 /* We could expand this as
12155 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
12156 or to
12157 memcpy (str, fmt_with_nul_at_cstm1, cst);
12158 but in the former case that might increase code size
12159 and in the latter case grow .rodata section too much.
12160 So punt for now. */
12161 if (len >= destlen)
12162 return NULL_TREE;
12163
12164 if (!fn)
12165 return NULL_TREE;
12166
12167 /* Convert snprintf (str, cst, fmt) into strcpy (str, fmt) when
12168 'format' is known to contain no % formats and
12169 strlen (fmt) < cst. */
12170 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
12171
12172 if (!ignored)
12173 retval = build_int_cst (integer_type_node, strlen (fmt_str));
12174 }
12175
12176 	 /* If the format is "%s", use strcpy when the length of ORIG is a known constant smaller than DESTSIZE. */
12177 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
12178 {
12179 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12180 unsigned HOST_WIDE_INT origlen;
12181
12182 /* Don't crash on snprintf (str1, cst, "%s"). */
12183 if (!orig)
12184 return NULL_TREE;
12185
12186 retval = c_strlen (orig, 1);
12187 if (!retval || !tree_fits_uhwi_p (retval))
12188 return NULL_TREE;
12189
12190 origlen = tree_to_uhwi (retval);
12191 /* We could expand this as
12192 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
12193 or to
12194 memcpy (str1, str2_with_nul_at_cstm1, cst);
12195 but in the former case that might increase code size
12196 and in the latter case grow .rodata section too much.
12197 So punt for now. */
12198 if (origlen >= destlen)
12199 return NULL_TREE;
12200
12201 /* Convert snprintf (str1, cst, "%s", str2) into
12202 strcpy (str1, str2) if strlen (str2) < cst. */
12203 if (!fn)
12204 return NULL_TREE;
12205
12206 call = build_call_expr_loc (loc, fn, 2, dest, orig);
12207
12208 if (ignored)
12209 retval = NULL_TREE;
12210 }
12211
12212 if (call && retval)
12213 {
12214 tree fn = builtin_decl_explicit (BUILT_IN_SNPRINTF);
12215 retval = fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fn)), retval);
12216 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
12217 }
12218 else
12219 return call;
12220 }
12221
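/* Editorial example (illustrative, not part of the original source):
   with a constant destination size the folder above gives

     char buf[8];
     snprintf (buf, sizeof buf, "abc");        =>  strcpy (buf, "abc");
     snprintf (buf, sizeof buf, "%s", "abc");  =>  strcpy (buf, "abc");

   but it punts on snprintf (buf, 3, "abc"), where len >= destlen and
   truncation would have to be emulated.  */
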
12222 /* Expand a call EXP to __builtin_object_size. */
12223
12224 static rtx
12225 expand_builtin_object_size (tree exp)
12226 {
12227 tree ost;
12228 int object_size_type;
12229 tree fndecl = get_callee_fndecl (exp);
12230
12231 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
12232 {
12233 error ("%Kfirst argument of %D must be a pointer, second integer constant",
12234 exp, fndecl);
12235 expand_builtin_trap ();
12236 return const0_rtx;
12237 }
12238
12239 ost = CALL_EXPR_ARG (exp, 1);
12240 STRIP_NOPS (ost);
12241
12242 if (TREE_CODE (ost) != INTEGER_CST
12243 || tree_int_cst_sgn (ost) < 0
12244 || compare_tree_int (ost, 3) > 0)
12245 {
12246 error ("%Klast argument of %D is not integer constant between 0 and 3",
12247 exp, fndecl);
12248 expand_builtin_trap ();
12249 return const0_rtx;
12250 }
12251
12252 object_size_type = tree_to_shwi (ost);
12253
12254 return object_size_type < 2 ? constm1_rtx : const0_rtx;
12255 }
12256
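/* Editorial example (illustrative, not part of the original source):
   by the time expansion runs, any computable size has already been
   folded away, so the remaining calls expand to the documented
   "unknown" values:

     __builtin_object_size (p, 0)  =>  (size_t) -1
     __builtin_object_size (p, 2)  =>  (size_t) 0

   i.e. constm1_rtx for types 0 and 1, const0_rtx for types 2 and 3.  */
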
12257 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12258 FCODE is the BUILT_IN_* to use.
12259 Return NULL_RTX if we failed; the caller should emit a normal call,
12260 otherwise try to get the result in TARGET, if convenient (and in
12261 mode MODE if that's convenient). */
12262
12263 static rtx
12264 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
12265 enum built_in_function fcode)
12266 {
12267 tree dest, src, len, size;
12268
12269 if (!validate_arglist (exp,
12270 POINTER_TYPE,
12271 fcode == BUILT_IN_MEMSET_CHK
12272 ? INTEGER_TYPE : POINTER_TYPE,
12273 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
12274 return NULL_RTX;
12275
12276 dest = CALL_EXPR_ARG (exp, 0);
12277 src = CALL_EXPR_ARG (exp, 1);
12278 len = CALL_EXPR_ARG (exp, 2);
12279 size = CALL_EXPR_ARG (exp, 3);
12280
12281 if (! tree_fits_uhwi_p (size))
12282 return NULL_RTX;
12283
12284 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
12285 {
12286 tree fn;
12287
12288 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
12289 {
12290 warning_at (tree_nonartificial_location (exp),
12291 0, "%Kcall to %D will always overflow destination buffer",
12292 exp, get_callee_fndecl (exp));
12293 return NULL_RTX;
12294 }
12295
12296 fn = NULL_TREE;
12297 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12298 mem{cpy,pcpy,move,set} is available. */
12299 switch (fcode)
12300 {
12301 case BUILT_IN_MEMCPY_CHK:
12302 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
12303 break;
12304 case BUILT_IN_MEMPCPY_CHK:
12305 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
12306 break;
12307 case BUILT_IN_MEMMOVE_CHK:
12308 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
12309 break;
12310 case BUILT_IN_MEMSET_CHK:
12311 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
12312 break;
12313 default:
12314 break;
12315 }
12316
12317 if (! fn)
12318 return NULL_RTX;
12319
12320 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
12321 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
12322 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12323 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12324 }
12325 else if (fcode == BUILT_IN_MEMSET_CHK)
12326 return NULL_RTX;
12327 else
12328 {
12329 unsigned int dest_align = get_pointer_alignment (dest);
12330
12331 /* If DEST is not a pointer type, call the normal function. */
12332 if (dest_align == 0)
12333 return NULL_RTX;
12334
12335 /* If SRC and DEST are the same (and not volatile), do nothing. */
12336 if (operand_equal_p (src, dest, 0))
12337 {
12338 tree expr;
12339
12340 if (fcode != BUILT_IN_MEMPCPY_CHK)
12341 {
12342 /* Evaluate and ignore LEN in case it has side-effects. */
12343 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
12344 return expand_expr (dest, target, mode, EXPAND_NORMAL);
12345 }
12346
12347 expr = fold_build_pointer_plus (dest, len);
12348 return expand_expr (expr, target, mode, EXPAND_NORMAL);
12349 }
12350
12351 /* __memmove_chk special case. */
12352 if (fcode == BUILT_IN_MEMMOVE_CHK)
12353 {
12354 unsigned int src_align = get_pointer_alignment (src);
12355
12356 if (src_align == 0)
12357 return NULL_RTX;
12358
12359 /* If src is categorized for a readonly section we can use
12360 normal __memcpy_chk. */
12361 if (readonly_data_expr (src))
12362 {
12363 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12364 if (!fn)
12365 return NULL_RTX;
12366 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
12367 dest, src, len, size);
12368 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
12369 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12370 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12371 }
12372 }
12373 return NULL_RTX;
12374 }
12375 }
12376
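/* Editorial example (illustrative, not part of the original source):
   for a constant length that fits within the known object size,

     __memcpy_chk (d, s, 16, 32)   expands like   memcpy (d, s, 16);

   whereas __memcpy_chk (d, s, 64, 32) draws the "will always
   overflow" warning and returns NULL_RTX, so the checking library
   call is still emitted and aborts at run time.  */
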
12377 /* Emit warning if a buffer overflow is detected at compile time. */
12378
12379 static void
12380 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
12381 {
12382 int is_strlen = 0;
12383 tree len, size;
12384 location_t loc = tree_nonartificial_location (exp);
12385
12386 switch (fcode)
12387 {
12388 case BUILT_IN_STRCPY_CHK:
12389 case BUILT_IN_STPCPY_CHK:
12390 /* For __strcat_chk the warning will be emitted only if overflowing
12391 by at least strlen (dest) + 1 bytes. */
12392 case BUILT_IN_STRCAT_CHK:
12393 len = CALL_EXPR_ARG (exp, 1);
12394 size = CALL_EXPR_ARG (exp, 2);
12395 is_strlen = 1;
12396 break;
12397 case BUILT_IN_STRNCAT_CHK:
12398 case BUILT_IN_STRNCPY_CHK:
12399 case BUILT_IN_STPNCPY_CHK:
12400 len = CALL_EXPR_ARG (exp, 2);
12401 size = CALL_EXPR_ARG (exp, 3);
12402 break;
12403 case BUILT_IN_SNPRINTF_CHK:
12404 case BUILT_IN_VSNPRINTF_CHK:
12405 len = CALL_EXPR_ARG (exp, 1);
12406 size = CALL_EXPR_ARG (exp, 3);
12407 break;
12408 default:
12409 gcc_unreachable ();
12410 }
12411
12412 if (!len || !size)
12413 return;
12414
12415 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
12416 return;
12417
12418 if (is_strlen)
12419 {
12420 len = c_strlen (len, 1);
12421 if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
12422 return;
12423 }
12424 else if (fcode == BUILT_IN_STRNCAT_CHK)
12425 {
12426 tree src = CALL_EXPR_ARG (exp, 1);
12427 if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
12428 return;
12429 src = c_strlen (src, 1);
12430 if (! src || ! tree_fits_uhwi_p (src))
12431 {
12432 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
12433 exp, get_callee_fndecl (exp));
12434 return;
12435 }
12436 else if (tree_int_cst_lt (src, size))
12437 return;
12438 }
12439 else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
12440 return;
12441
12442 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
12443 exp, get_callee_fndecl (exp));
12444 }
12445
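/* Editorial example (illustrative, not part of the original source):

     char buf[4];
     __strcpy_chk (buf, "overflow", sizeof buf);

   Here c_strlen gives 8, which is not smaller than the object size 4,
   so the "will always overflow destination buffer" warning above is
   emitted at compile time.  */
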
12446 /* Emit warning if a buffer overflow is detected at compile time
12447 in __sprintf_chk/__vsprintf_chk calls. */
12448
12449 static void
12450 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
12451 {
12452 tree size, len, fmt;
12453 const char *fmt_str;
12454 int nargs = call_expr_nargs (exp);
12455
12456 /* Verify the required arguments in the original call. */
12457
12458 if (nargs < 4)
12459 return;
12460 size = CALL_EXPR_ARG (exp, 2);
12461 fmt = CALL_EXPR_ARG (exp, 3);
12462
12463 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
12464 return;
12465
12466 /* Check whether the format is a literal string constant. */
12467 fmt_str = c_getstr (fmt);
12468 if (fmt_str == NULL)
12469 return;
12470
12471 if (!init_target_chars ())
12472 return;
12473
12474 /* If the format doesn't contain % args or %%, we know its size. */
12475 if (strchr (fmt_str, target_percent) == 0)
12476 len = build_int_cstu (size_type_node, strlen (fmt_str));
12477 	 /* If the format is "%s" and the first variadic argument is a
12478 	 string literal, we know the length too. */
12479 else if (fcode == BUILT_IN_SPRINTF_CHK
12480 && strcmp (fmt_str, target_percent_s) == 0)
12481 {
12482 tree arg;
12483
12484 if (nargs < 5)
12485 return;
12486 arg = CALL_EXPR_ARG (exp, 4);
12487 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
12488 return;
12489
12490 len = c_strlen (arg, 1);
12491 if (!len || ! tree_fits_uhwi_p (len))
12492 return;
12493 }
12494 else
12495 return;
12496
12497 if (! tree_int_cst_lt (len, size))
12498 warning_at (tree_nonartificial_location (exp),
12499 0, "%Kcall to %D will always overflow destination buffer",
12500 exp, get_callee_fndecl (exp));
12501 }
12502
12503 /* Emit warning if a free is called with address of a variable. */
12504
12505 static void
12506 maybe_emit_free_warning (tree exp)
12507 {
12508 tree arg = CALL_EXPR_ARG (exp, 0);
12509
12510 STRIP_NOPS (arg);
12511 if (TREE_CODE (arg) != ADDR_EXPR)
12512 return;
12513
12514 arg = get_base_address (TREE_OPERAND (arg, 0));
12515 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
12516 return;
12517
12518 if (SSA_VAR_P (arg))
12519 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
12520 "%Kattempt to free a non-heap object %qD", exp, arg);
12521 else
12522 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
12523 "%Kattempt to free a non-heap object", exp);
12524 }
12525
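/* Editorial example (illustrative, not part of the original source):

     int x, a[4];
     free (&x);      // "attempt to free a non-heap object 'x'"
     free (&a[2]);   // base address is 'a', same warning

   Bases reached through a dereference (INDIRECT_REF or MEM_REF) are
   skipped, since they may legitimately point into the heap.  */
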
12526 /* Fold a call to __builtin_object_size with arguments PTR and OST,
12527 if possible. */
12528
12529 static tree
12530 fold_builtin_object_size (tree ptr, tree ost)
12531 {
12532 unsigned HOST_WIDE_INT bytes;
12533 int object_size_type;
12534
12535 if (!validate_arg (ptr, POINTER_TYPE)
12536 || !validate_arg (ost, INTEGER_TYPE))
12537 return NULL_TREE;
12538
12539 STRIP_NOPS (ost);
12540
12541 if (TREE_CODE (ost) != INTEGER_CST
12542 || tree_int_cst_sgn (ost) < 0
12543 || compare_tree_int (ost, 3) > 0)
12544 return NULL_TREE;
12545
12546 object_size_type = tree_to_shwi (ost);
12547
12548 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
12549 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
12550 and (size_t) 0 for types 2 and 3. */
12551 if (TREE_SIDE_EFFECTS (ptr))
12552 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
12553
12554 if (TREE_CODE (ptr) == ADDR_EXPR)
12555 {
12556 bytes = compute_builtin_object_size (ptr, object_size_type);
12557 if (wi::fits_to_tree_p (bytes, size_type_node))
12558 return build_int_cstu (size_type_node, bytes);
12559 }
12560 else if (TREE_CODE (ptr) == SSA_NAME)
12561 {
12562 /* If object size is not known yet, delay folding until
12563 later. Maybe subsequent passes will help determining
12564 it. */
12565 bytes = compute_builtin_object_size (ptr, object_size_type);
12566 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
12567 && wi::fits_to_tree_p (bytes, size_type_node))
12568 return build_int_cstu (size_type_node, bytes);
12569 }
12570
12571 return NULL_TREE;
12572 }
12573
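/* Editorial example (illustrative, not part of the original source):

     char buf[64];
     __builtin_object_size (&buf[16], 0)   folds to  (size_t) 48

   and because the builtin must not evaluate its argument,

     __builtin_object_size (p++, 0)        folds to  (size_t) -1

   immediately, whatever p points to.  */
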
12574 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12575 DEST, SRC, LEN, and SIZE are the arguments to the call.
12576 	 IGNORE is true if the return value can be ignored. FCODE is the BUILT_IN_*
12577 code of the builtin. If MAXLEN is not NULL, it is maximum length
12578 passed as third argument. */
12579
12580 tree
12581 fold_builtin_memory_chk (location_t loc, tree fndecl,
12582 tree dest, tree src, tree len, tree size,
12583 tree maxlen, bool ignore,
12584 enum built_in_function fcode)
12585 {
12586 tree fn;
12587
12588 if (!validate_arg (dest, POINTER_TYPE)
12589 || !validate_arg (src,
12590 (fcode == BUILT_IN_MEMSET_CHK
12591 ? INTEGER_TYPE : POINTER_TYPE))
12592 || !validate_arg (len, INTEGER_TYPE)
12593 || !validate_arg (size, INTEGER_TYPE))
12594 return NULL_TREE;
12595
12596 /* If SRC and DEST are the same (and not volatile), return DEST
12597 (resp. DEST+LEN for __mempcpy_chk). */
12598 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
12599 {
12600 if (fcode != BUILT_IN_MEMPCPY_CHK)
12601 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12602 dest, len);
12603 else
12604 {
12605 tree temp = fold_build_pointer_plus_loc (loc, dest, len);
12606 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
12607 }
12608 }
12609
12610 if (! tree_fits_uhwi_p (size))
12611 return NULL_TREE;
12612
12613 if (! integer_all_onesp (size))
12614 {
12615 if (! tree_fits_uhwi_p (len))
12616 {
12617 /* If LEN is not constant, try MAXLEN too.
12618 For MAXLEN only allow optimizing into non-_ocs function
12619 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12620 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
12621 {
12622 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
12623 {
12624 /* (void) __mempcpy_chk () can be optimized into
12625 (void) __memcpy_chk (). */
12626 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12627 if (!fn)
12628 return NULL_TREE;
12629
12630 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
12631 }
12632 return NULL_TREE;
12633 }
12634 }
12635 else
12636 maxlen = len;
12637
12638 if (tree_int_cst_lt (size, maxlen))
12639 return NULL_TREE;
12640 }
12641
12642 fn = NULL_TREE;
12643 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12644 mem{cpy,pcpy,move,set} is available. */
12645 switch (fcode)
12646 {
12647 case BUILT_IN_MEMCPY_CHK:
12648 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
12649 break;
12650 case BUILT_IN_MEMPCPY_CHK:
12651 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
12652 break;
12653 case BUILT_IN_MEMMOVE_CHK:
12654 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
12655 break;
12656 case BUILT_IN_MEMSET_CHK:
12657 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
12658 break;
12659 default:
12660 break;
12661 }
12662
12663 if (!fn)
12664 return NULL_TREE;
12665
12666 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12667 }
12668
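/* Editorial example (illustrative, not part of the original source):
   at the tree level the folder above performs

     __memcpy_chk (d, s, 8, 32)           =>  memcpy (d, s, 8)
     __memcpy_chk (d, s, n, (size_t) -1)  =>  memcpy (d, s, n)

   since an all-ones SIZE means checking is disabled, while an ignored
   __mempcpy_chk with unknown length degrades to __memcpy_chk.  */
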
12669 /* Fold a call to the __st[rp]cpy_chk builtin.
12670 DEST, SRC, and SIZE are the arguments to the call.
12671 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12672 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12673 strings passed as second argument. */
12674
12675 tree
12676 fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
12677 tree src, tree size,
12678 tree maxlen, bool ignore,
12679 enum built_in_function fcode)
12680 {
12681 tree len, fn;
12682
12683 if (!validate_arg (dest, POINTER_TYPE)
12684 || !validate_arg (src, POINTER_TYPE)
12685 || !validate_arg (size, INTEGER_TYPE))
12686 return NULL_TREE;
12687
12688 /* If SRC and DEST are the same (and not volatile), return DEST. */
12689 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12690 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
12691
12692 if (! tree_fits_uhwi_p (size))
12693 return NULL_TREE;
12694
12695 if (! integer_all_onesp (size))
12696 {
12697 len = c_strlen (src, 1);
12698 if (! len || ! tree_fits_uhwi_p (len))
12699 {
12700 /* If LEN is not constant, try MAXLEN too.
12701 For MAXLEN only allow optimizing into non-_ocs function
12702 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12703 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
12704 {
12705 if (fcode == BUILT_IN_STPCPY_CHK)
12706 {
12707 if (! ignore)
12708 return NULL_TREE;
12709
12710 /* If return value of __stpcpy_chk is ignored,
12711 optimize into __strcpy_chk. */
12712 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
12713 if (!fn)
12714 return NULL_TREE;
12715
12716 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12717 }
12718
12719 if (! len || TREE_SIDE_EFFECTS (len))
12720 return NULL_TREE;
12721
12722 /* If c_strlen returned something, but not a constant,
12723 transform __strcpy_chk into __memcpy_chk. */
12724 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12725 if (!fn)
12726 return NULL_TREE;
12727
12728 len = fold_convert_loc (loc, size_type_node, len);
12729 len = size_binop_loc (loc, PLUS_EXPR, len,
12730 build_int_cst (size_type_node, 1));
12731 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12732 build_call_expr_loc (loc, fn, 4,
12733 dest, src, len, size));
12734 }
12735 }
12736 else
12737 maxlen = len;
12738
12739 if (! tree_int_cst_lt (maxlen, size))
12740 return NULL_TREE;
12741 }
12742
12743 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12744 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
12745 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
12746 if (!fn)
12747 return NULL_TREE;
12748
12749 return build_call_expr_loc (loc, fn, 2, dest, src);
12750 }
12751
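/* Editorial example (illustrative, not part of the original source):

     __strcpy_chk (d, "abc", 32)   folds to  strcpy (d, "abc")

   because strlen ("abc") = 3 < 32.  When c_strlen yields a
   non-constant, side-effect-free LEN, the call becomes
   __memcpy_chk (d, src, LEN + 1, 32) instead.  */
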
12752 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
12753 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12754 length passed as third argument. IGNORE is true if return value can be
12755 ignored. FCODE is the BUILT_IN_* code of the builtin. */
12756
12757 tree
12758 fold_builtin_stxncpy_chk (location_t loc, tree dest, tree src,
12759 tree len, tree size, tree maxlen, bool ignore,
12760 enum built_in_function fcode)
12761 {
12762 tree fn;
12763
12764 if (!validate_arg (dest, POINTER_TYPE)
12765 || !validate_arg (src, POINTER_TYPE)
12766 || !validate_arg (len, INTEGER_TYPE)
12767 || !validate_arg (size, INTEGER_TYPE))
12768 return NULL_TREE;
12769
12770 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
12771 {
12772 /* If return value of __stpncpy_chk is ignored,
12773 optimize into __strncpy_chk. */
12774 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
12775 if (fn)
12776 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
12777 }
12778
12779 if (! tree_fits_uhwi_p (size))
12780 return NULL_TREE;
12781
12782 if (! integer_all_onesp (size))
12783 {
12784 if (! tree_fits_uhwi_p (len))
12785 {
12786 /* If LEN is not constant, try MAXLEN too.
12787 For MAXLEN only allow optimizing into non-_ocs function
12788 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12789 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
12790 return NULL_TREE;
12791 }
12792 else
12793 maxlen = len;
12794
12795 if (tree_int_cst_lt (size, maxlen))
12796 return NULL_TREE;
12797 }
12798
12799 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
12800 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
12801 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
12802 if (!fn)
12803 return NULL_TREE;
12804
12805 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12806 }
12807
12808 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12809 are the arguments to the call. */
12810
12811 static tree
12812 fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
12813 tree src, tree size)
12814 {
12815 tree fn;
12816 const char *p;
12817
12818 if (!validate_arg (dest, POINTER_TYPE)
12819 || !validate_arg (src, POINTER_TYPE)
12820 || !validate_arg (size, INTEGER_TYPE))
12821 return NULL_TREE;
12822
12823 p = c_getstr (src);
12824 /* If the SRC parameter is "", return DEST. */
12825 if (p && *p == '\0')
12826 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12827
12828 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
12829 return NULL_TREE;
12830
12831 /* If __builtin_strcat_chk is used, assume strcat is available. */
12832 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
12833 if (!fn)
12834 return NULL_TREE;
12835
12836 return build_call_expr_loc (loc, fn, 2, dest, src);
12837 }
12838
12839 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12840 LEN, and SIZE. */
12841
12842 static tree
12843 fold_builtin_strncat_chk (location_t loc, tree fndecl,
12844 tree dest, tree src, tree len, tree size)
12845 {
12846 tree fn;
12847 const char *p;
12848
12849 if (!validate_arg (dest, POINTER_TYPE)
12850 || !validate_arg (src, POINTER_TYPE)
12851 	 || !validate_arg (len, INTEGER_TYPE)
12852 || !validate_arg (size, INTEGER_TYPE))
12853 return NULL_TREE;
12854
12855 p = c_getstr (src);
12856 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12857 if (p && *p == '\0')
12858 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12859 else if (integer_zerop (len))
12860 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12861
12862 if (! tree_fits_uhwi_p (size))
12863 return NULL_TREE;
12864
12865 if (! integer_all_onesp (size))
12866 {
12867 tree src_len = c_strlen (src, 1);
12868 if (src_len
12869 && tree_fits_uhwi_p (src_len)
12870 && tree_fits_uhwi_p (len)
12871 && ! tree_int_cst_lt (len, src_len))
12872 {
12873 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12874 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
12875 if (!fn)
12876 return NULL_TREE;
12877
12878 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12879 }
12880 return NULL_TREE;
12881 }
12882
12883 /* If __builtin_strncat_chk is used, assume strncat is available. */
12884 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
12885 if (!fn)
12886 return NULL_TREE;
12887
12888 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12889 }
12890
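/* Editorial example (illustrative, not part of the original source):

     __strncat_chk (d, s, 0, sz)      folds to  d
     __strncat_chk (d, "", n, sz)     folds to  d
     __strncat_chk (d, "ab", 8, 32)   folds to  __strcat_chk (d, "ab", 32)

   the last because LEN >= strlen (SRC) makes the length bound
   redundant.  */
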
12891 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
12892 Return NULL_TREE if a normal call should be emitted rather than
12893 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
12894 or BUILT_IN_VSPRINTF_CHK. */
12895
12896 static tree
12897 fold_builtin_sprintf_chk_1 (location_t loc, int nargs, tree *args,
12898 enum built_in_function fcode)
12899 {
12900 tree dest, size, len, fn, fmt, flag;
12901 const char *fmt_str;
12902
12903 /* Verify the required arguments in the original call. */
12904 if (nargs < 4)
12905 return NULL_TREE;
12906 dest = args[0];
12907 if (!validate_arg (dest, POINTER_TYPE))
12908 return NULL_TREE;
12909 flag = args[1];
12910 if (!validate_arg (flag, INTEGER_TYPE))
12911 return NULL_TREE;
12912 size = args[2];
12913 if (!validate_arg (size, INTEGER_TYPE))
12914 return NULL_TREE;
12915 fmt = args[3];
12916 if (!validate_arg (fmt, POINTER_TYPE))
12917 return NULL_TREE;
12918
12919 if (! tree_fits_uhwi_p (size))
12920 return NULL_TREE;
12921
12922 len = NULL_TREE;
12923
12924 if (!init_target_chars ())
12925 return NULL_TREE;
12926
12927 /* Check whether the format is a literal string constant. */
12928 fmt_str = c_getstr (fmt);
12929 if (fmt_str != NULL)
12930 {
12931 /* If the format doesn't contain % args or %%, we know the size. */
12932 if (strchr (fmt_str, target_percent) == 0)
12933 {
12934 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
12935 len = build_int_cstu (size_type_node, strlen (fmt_str));
12936 }
12937 	 /* If the format is "%s" and the first variadic argument is a string literal,
12938 we know the size too. */
12939 else if (fcode == BUILT_IN_SPRINTF_CHK
12940 && strcmp (fmt_str, target_percent_s) == 0)
12941 {
12942 tree arg;
12943
12944 if (nargs == 5)
12945 {
12946 arg = args[4];
12947 if (validate_arg (arg, POINTER_TYPE))
12948 {
12949 len = c_strlen (arg, 1);
12950 if (! len || ! tree_fits_uhwi_p (len))
12951 len = NULL_TREE;
12952 }
12953 }
12954 }
12955 }
12956
12957 if (! integer_all_onesp (size))
12958 {
12959 if (! len || ! tree_int_cst_lt (len, size))
12960 return NULL_TREE;
12961 }
12962
12963 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12964 or if format doesn't contain % chars or is "%s". */
12965 if (! integer_zerop (flag))
12966 {
12967 if (fmt_str == NULL)
12968 return NULL_TREE;
12969 if (strchr (fmt_str, target_percent) != NULL
12970 && strcmp (fmt_str, target_percent_s))
12971 return NULL_TREE;
12972 }
12973
12974 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12975 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
12976 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
12977 if (!fn)
12978 return NULL_TREE;
12979
12980 return rewrite_call_expr_array (loc, nargs, args, 4, fn, 2, dest, fmt);
12981 }
12982
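/* Editorial example (illustrative, not part of the original source):

     char buf[16];
     __sprintf_chk (buf, 1, 16, "hello");
       =>  sprintf (buf, "hello");     // strlen ("hello") = 5 < 16
     __sprintf_chk (buf, 0, (size_t) -1, "%d", i);
       =>  sprintf (buf, "%d", i);     // flag 0, checking disabled

   An all-ones SIZE disables the bound check, and FLAG 0 permits
   arbitrary formats.  */
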
12983 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12984 a normal call should be emitted rather than expanding the function
12985 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
12986
12987 static tree
12988 fold_builtin_sprintf_chk (location_t loc, tree exp,
12989 enum built_in_function fcode)
12990 {
12991 return fold_builtin_sprintf_chk_1 (loc, call_expr_nargs (exp),
12992 CALL_EXPR_ARGP (exp), fcode);
12993 }
12994
12995 	/* Fold a call EXP to __{,v}snprintf_chk having NARGS passed as ARGS. Return
12996 NULL_TREE if a normal call should be emitted rather than expanding
12997 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12998 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12999 passed as second argument. */
13000
13001 static tree
13002 fold_builtin_snprintf_chk_1 (location_t loc, int nargs, tree *args,
13003 tree maxlen, enum built_in_function fcode)
13004 {
13005 tree dest, size, len, fn, fmt, flag;
13006 const char *fmt_str;
13007
13008 /* Verify the required arguments in the original call. */
13009 if (nargs < 5)
13010 return NULL_TREE;
13011 dest = args[0];
13012 if (!validate_arg (dest, POINTER_TYPE))
13013 return NULL_TREE;
13014 len = args[1];
13015 if (!validate_arg (len, INTEGER_TYPE))
13016 return NULL_TREE;
13017 flag = args[2];
13018 if (!validate_arg (flag, INTEGER_TYPE))
13019 return NULL_TREE;
13020 size = args[3];
13021 if (!validate_arg (size, INTEGER_TYPE))
13022 return NULL_TREE;
13023 fmt = args[4];
13024 if (!validate_arg (fmt, POINTER_TYPE))
13025 return NULL_TREE;
13026
13027 if (! tree_fits_uhwi_p (size))
13028 return NULL_TREE;
13029
13030 if (! integer_all_onesp (size))
13031 {
13032 if (! tree_fits_uhwi_p (len))
13033 {
13034 /* If LEN is not constant, try MAXLEN too.
13035 For MAXLEN only allow optimizing into non-_ocs function
13036 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13037 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
13038 return NULL_TREE;
13039 }
13040 else
13041 maxlen = len;
13042
13043 if (tree_int_cst_lt (size, maxlen))
13044 return NULL_TREE;
13045 }
13046
13047 if (!init_target_chars ())
13048 return NULL_TREE;
13049
13050 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13051 or if format doesn't contain % chars or is "%s". */
13052 if (! integer_zerop (flag))
13053 {
13054 fmt_str = c_getstr (fmt);
13055 if (fmt_str == NULL)
13056 return NULL_TREE;
13057 if (strchr (fmt_str, target_percent) != NULL
13058 && strcmp (fmt_str, target_percent_s))
13059 return NULL_TREE;
13060 }
13061
13062 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13063 available. */
13064 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
13065 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
13066 if (!fn)
13067 return NULL_TREE;
13068
13069 return rewrite_call_expr_array (loc, nargs, args, 5, fn, 3, dest, len, fmt);
13070 }
13071
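/* Editorial example (illustrative, not part of the original source):

     char buf[16];
     __snprintf_chk (buf, 8, 1, 16, "%s", s)
       folds to  snprintf (buf, 8, "%s", s)

   since the constant LEN 8 does not exceed the object size 16; with a
   non-constant LEN and no MAXLEN hint the checked call is kept.  */
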
13072 	/* Fold a call EXP to __{,v}snprintf_chk. Return NULL_TREE if
13073 a normal call should be emitted rather than expanding the function
13074 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13075 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13076 passed as second argument. */
13077
13078 static tree
13079 fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
13080 enum built_in_function fcode)
13081 {
13082 return fold_builtin_snprintf_chk_1 (loc, call_expr_nargs (exp),
13083 CALL_EXPR_ARGP (exp), maxlen, fcode);
13084 }
13085
13086 /* Builtins with folding operations that operate on "..." arguments
13087 need special handling; we need to store the arguments in a convenient
13088 data structure before attempting any folding. Fortunately there are
13089 only a few builtins that fall into this category. FNDECL is the
13090 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
13091 result of the function call is ignored. */
13092
13093 static tree
13094 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
13095 bool ignore ATTRIBUTE_UNUSED)
13096 {
13097 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
13098 tree ret = NULL_TREE;
13099
13100 switch (fcode)
13101 {
13102 case BUILT_IN_SPRINTF_CHK:
13103 case BUILT_IN_VSPRINTF_CHK:
13104 ret = fold_builtin_sprintf_chk (loc, exp, fcode);
13105 break;
13106
13107 case BUILT_IN_SNPRINTF_CHK:
13108 case BUILT_IN_VSNPRINTF_CHK:
13109 ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
13110 break;
13111
13112 case BUILT_IN_FPCLASSIFY:
13113 ret = fold_builtin_fpclassify (loc, exp);
13114 break;
13115
13116 default:
13117 break;
13118 }
13119 if (ret)
13120 {
13121 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
13122 SET_EXPR_LOCATION (ret, loc);
13123 TREE_NO_WARNING (ret) = 1;
13124 return ret;
13125 }
13126 return NULL_TREE;
13127 }
13128
13129 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
13130 FMT and ARG are the arguments to the call; we don't fold cases with
13131 more than 2 arguments, and ARG may be null if this is a 1-argument case.
13132
13133 Return NULL_TREE if no simplification was possible, otherwise return the
13134 simplified form of the call as a tree. FCODE is the BUILT_IN_*
13135 code of the function to be simplified. */
13136
13137 static tree
13138 fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
13139 tree arg, bool ignore,
13140 enum built_in_function fcode)
13141 {
13142 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
13143 const char *fmt_str = NULL;
13144
13145 /* If the return value is used, don't do the transformation. */
13146 if (! ignore)
13147 return NULL_TREE;
13148
13149 /* Verify the required arguments in the original call. */
13150 if (!validate_arg (fmt, POINTER_TYPE))
13151 return NULL_TREE;
13152
13153 /* Check whether the format is a literal string constant. */
13154 fmt_str = c_getstr (fmt);
13155 if (fmt_str == NULL)
13156 return NULL_TREE;
13157
13158 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
13159 {
13160 /* If we're using an unlocked function, assume the other
13161 unlocked functions exist explicitly. */
13162 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
13163 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
13164 }
13165 else
13166 {
13167 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
13168 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
13169 }
13170
13171 if (!init_target_chars ())
13172 return NULL_TREE;
13173
13174 if (strcmp (fmt_str, target_percent_s) == 0
13175 || strchr (fmt_str, target_percent) == NULL)
13176 {
13177 const char *str;
13178
13179 if (strcmp (fmt_str, target_percent_s) == 0)
13180 {
13181 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
13182 return NULL_TREE;
13183
13184 if (!arg || !validate_arg (arg, POINTER_TYPE))
13185 return NULL_TREE;
13186
13187 str = c_getstr (arg);
13188 if (str == NULL)
13189 return NULL_TREE;
13190 }
13191 else
13192 {
13193 /* The format specifier doesn't contain any '%' characters. */
13194 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
13195 && arg)
13196 return NULL_TREE;
13197 str = fmt_str;
13198 }
13199
13200 /* If the string was "", printf does nothing. */
13201 if (str[0] == '\0')
13202 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
13203
13204 /* If the string has length of 1, call putchar. */
13205 if (str[1] == '\0')
13206 {
13207 	 /* Given printf ("c"), where c is any single character,
13208 convert "c"[0] to an int and pass that to the replacement
13209 function. */
13210 newarg = build_int_cst (integer_type_node, str[0]);
13211 if (fn_putchar)
13212 call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
13213 }
13214 else
13215 {
13216 /* If the string was "string\n", call puts("string"). */
13217 size_t len = strlen (str);
13218 if ((unsigned char)str[len - 1] == target_newline
13219 && (size_t) (int) len == len
13220 && (int) len > 0)
13221 {
13222 char *newstr;
13223 tree offset_node, string_cst;
13224
13225 /* Create a NUL-terminated string that's one char shorter
13226 than the original, stripping off the trailing '\n'. */
13227 newarg = build_string_literal (len, str);
13228 string_cst = string_constant (newarg, &offset_node);
13229 gcc_checking_assert (string_cst
13230 && (TREE_STRING_LENGTH (string_cst)
13231 == (int) len)
13232 && integer_zerop (offset_node)
13233 && (unsigned char)
13234 TREE_STRING_POINTER (string_cst)[len - 1]
13235 == target_newline);
13236 /* build_string_literal creates a new STRING_CST,
13237 modify it in place to avoid double copying. */
13238 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
13239 newstr[len - 1] = '\0';
13240 if (fn_puts)
13241 call = build_call_expr_loc (loc, fn_puts, 1, newarg);
13242 }
13243 else
13244 /* We'd like to arrange to call fputs(string,stdout) here,
13245 but we need stdout and don't have a way to get it yet. */
13246 return NULL_TREE;
13247 }
13248 }
13249
13250 /* The other optimizations can be done only on the non-va_list variants. */
13251 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
13252 return NULL_TREE;
13253
13254 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
13255 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
13256 {
13257 if (!arg || !validate_arg (arg, POINTER_TYPE))
13258 return NULL_TREE;
13259 if (fn_puts)
13260 call = build_call_expr_loc (loc, fn_puts, 1, arg);
13261 }
13262
13263 /* If the format specifier was "%c", call __builtin_putchar(arg). */
13264 else if (strcmp (fmt_str, target_percent_c) == 0)
13265 {
13266 if (!arg || !validate_arg (arg, INTEGER_TYPE))
13267 return NULL_TREE;
13268 if (fn_putchar)
13269 call = build_call_expr_loc (loc, fn_putchar, 1, arg);
13270 }
13271
13272 if (!call)
13273 return NULL_TREE;
13274
13275 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
13276 }
13277
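/* Editorial example (illustrative, not part of the original source):
   when the result is unused, the folder above performs

     printf ("x")        =>  putchar ('x')
     printf ("hi\n")     =>  puts ("hi")
     printf ("%s\n", s)  =>  puts (s)
     printf ("%c", c)    =>  putchar (c)

   printf ("hi") without a trailing newline is left alone: it would
   need fputs (..., stdout), and stdout is not available here.  */
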
13278 	/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
13279 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
13280 more than 3 arguments, and ARG may be null in the 2-argument case.
13281
13282 Return NULL_TREE if no simplification was possible, otherwise return the
13283 simplified form of the call as a tree. FCODE is the BUILT_IN_*
13284 code of the function to be simplified. */
13285
13286 static tree
13287 fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
13288 tree fmt, tree arg, bool ignore,
13289 enum built_in_function fcode)
13290 {
13291 tree fn_fputc, fn_fputs, call = NULL_TREE;
13292 const char *fmt_str = NULL;
13293
13294 /* If the return value is used, don't do the transformation. */
13295 if (! ignore)
13296 return NULL_TREE;
13297
13298 /* Verify the required arguments in the original call. */
13299 if (!validate_arg (fp, POINTER_TYPE))
13300 return NULL_TREE;
13301 if (!validate_arg (fmt, POINTER_TYPE))
13302 return NULL_TREE;
13303
13304 /* Check whether the format is a literal string constant. */
13305 fmt_str = c_getstr (fmt);
13306 if (fmt_str == NULL)
13307 return NULL_TREE;
13308
13309 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
13310 {
13311 /* If we're using an unlocked function, assume the other
13312 unlocked functions exist explicitly. */
13313 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
13314 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
13315 }
13316 else
13317 {
13318 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
13319 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
13320 }
13321
13322 if (!init_target_chars ())
13323 return NULL_TREE;
13324
13325 	 /* If the format doesn't contain % args or %%, use fputs. */
13326 if (strchr (fmt_str, target_percent) == NULL)
13327 {
13328 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
13329 && arg)
13330 return NULL_TREE;
13331
13332 /* If the format specifier was "", fprintf does nothing. */
13333 if (fmt_str[0] == '\0')
13334 {
13335 /* If FP has side-effects, just wait until gimplification is
13336 done. */
13337 if (TREE_SIDE_EFFECTS (fp))
13338 return NULL_TREE;
13339
13340 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
13341 }
13342
13343 /* When "string" doesn't contain %, replace all cases of
13344 fprintf (fp, string) with fputs (string, fp). The fputs
13345 builtin will take care of special cases like length == 1. */
13346 if (fn_fputs)
13347 call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
13348 }
13349
13350 /* The other optimizations can be done only on the non-va_list variants. */
13351 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
13352 return NULL_TREE;
13353
13354 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
13355 else if (strcmp (fmt_str, target_percent_s) == 0)
13356 {
13357 if (!arg || !validate_arg (arg, POINTER_TYPE))
13358 return NULL_TREE;
13359 if (fn_fputs)
13360 call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
13361 }
13362
13363 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
13364 else if (strcmp (fmt_str, target_percent_c) == 0)
13365 {
13366 if (!arg || !validate_arg (arg, INTEGER_TYPE))
13367 return NULL_TREE;
13368 if (fn_fputc)
13369 call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
13370 }
13371
13372 if (!call)
13373 return NULL_TREE;
13374 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
13375 }
13376
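/* Editorial example (illustrative, not part of the original source):
   when the result is unused, the folder above performs

     fprintf (fp, "hi")     =>  fputs ("hi", fp)
     fprintf (fp, "%s", s)  =>  fputs (s, fp)
     fprintf (fp, "%c", c)  =>  fputc (c, fp)

   and fprintf (fp, "") folds to 0 once FP is known to be free of side
   effects.  */
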
13377 /* Initialize format string characters in the target charset. */
13378
13379 static bool
13380 init_target_chars (void)
13381 {
13382 static bool init;
13383 if (!init)
13384 {
13385 target_newline = lang_hooks.to_target_charset ('\n');
13386 target_percent = lang_hooks.to_target_charset ('%');
13387 target_c = lang_hooks.to_target_charset ('c');
13388 target_s = lang_hooks.to_target_charset ('s');
13389 if (target_newline == 0 || target_percent == 0 || target_c == 0
13390 || target_s == 0)
13391 return false;
13392
13393 target_percent_c[0] = target_percent;
13394 target_percent_c[1] = target_c;
13395 target_percent_c[2] = '\0';
13396
13397 target_percent_s[0] = target_percent;
13398 target_percent_s[1] = target_s;
13399 target_percent_s[2] = '\0';
13400
13401 target_percent_s_newline[0] = target_percent;
13402 target_percent_s_newline[1] = target_s;
13403 target_percent_s_newline[2] = target_newline;
13404 target_percent_s_newline[3] = '\0';
13405
13406 init = true;
13407 }
13408 return true;
13409 }
13410
13411 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
13412 and no overflow/underflow occurred. INEXACT is true if M was not
13413 exactly calculated. TYPE is the tree type for the result. This
13414 function assumes that you cleared the MPFR flags and then
13415 calculated M to see if anything subsequently set a flag prior to
13416 entering this function. Return NULL_TREE if any checks fail. */
13417
13418 static tree
13419 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
13420 {
13421 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13422 overflow/underflow occurred. If -frounding-math, proceed iff the
13423 result of calling FUNC was exact. */
13424 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
13425 && (!flag_rounding_math || !inexact))
13426 {
13427 REAL_VALUE_TYPE rr;
13428
13429 real_from_mpfr (&rr, m, type, GMP_RNDN);
13430 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
13431 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13432 	 but the mpfr_t is not, then we underflowed in the
13433 conversion. */
13434 if (real_isfinite (&rr)
13435 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
13436 {
13437 REAL_VALUE_TYPE rmode;
13438
13439 real_convert (&rmode, TYPE_MODE (type), &rr);
13440 /* Proceed iff the specified mode can hold the value. */
13441 if (real_identical (&rmode, &rr))
13442 return build_real (type, rmode);
13443 }
13444 }
13445 return NULL_TREE;
13446 }
13447
13448 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
13449 number and no overflow/underflow occurred. INEXACT is true if M
13450 was not exactly calculated. TYPE is the tree type for the result.
13451 This function assumes that you cleared the MPFR flags and then
13452 calculated M to see if anything subsequently set a flag prior to
13453 	 entering this function. Return NULL_TREE if any checks fail; if
13454 	 FORCE_CONVERT is true, bypass the checks. */
13455
13456 static tree
13457 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
13458 {
13459 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13460 overflow/underflow occurred. If -frounding-math, proceed iff the
13461 result of calling FUNC was exact. */
13462 if (force_convert
13463 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
13464 && !mpfr_overflow_p () && !mpfr_underflow_p ()
13465 && (!flag_rounding_math || !inexact)))
13466 {
13467 REAL_VALUE_TYPE re, im;
13468
13469 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
13470 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
13471 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
13472 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13473 	 but the mpfr_t is not, then we underflowed in the
13474 conversion. */
13475 if (force_convert
13476 || (real_isfinite (&re) && real_isfinite (&im)
13477 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
13478 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
13479 {
13480 REAL_VALUE_TYPE re_mode, im_mode;
13481
13482 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
13483 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
13484 /* Proceed iff the specified mode can hold the value. */
13485 if (force_convert
13486 || (real_identical (&re_mode, &re)
13487 && real_identical (&im_mode, &im)))
13488 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
13489 build_real (TREE_TYPE (type), im_mode));
13490 }
13491 }
13492 return NULL_TREE;
13493 }
13494
13495 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
13496 FUNC on it and return the resulting value as a tree with type TYPE.
13497 If MIN and/or MAX are not NULL, then the supplied ARG must be
13498 within those bounds. If INCLUSIVE is true, then MIN/MAX are
13499 acceptable values, otherwise they are not. The mpfr precision is
13500 set to the precision of TYPE. We assume that function FUNC returns
13501 zero if the result could be calculated exactly within the requested
13502 precision. */
13503
13504 static tree
13505 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
13506 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
13507 bool inclusive)
13508 {
13509 tree result = NULL_TREE;
13510
13511 STRIP_NOPS (arg);
13512
13513 /* To proceed, MPFR must exactly represent the target floating point
13514 format, which only happens when the target base equals two. */
13515 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13516 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
13517 {
13518 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13519
13520 if (real_isfinite (ra)
13521 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
13522 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
13523 {
13524 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13525 const int prec = fmt->p;
13526 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13527 int inexact;
13528 mpfr_t m;
13529
13530 mpfr_init2 (m, prec);
13531 mpfr_from_real (m, ra, GMP_RNDN);
13532 mpfr_clear_flags ();
13533 inexact = func (m, m, rnd);
13534 result = do_mpfr_ckconv (m, type, inexact);
13535 mpfr_clear (m);
13536 }
13537 }
13538
13539 return result;
13540 }
13541
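/* Editorial example (illustrative, not part of the original source):
   do_mpfr_arg1 is what lets

     double d = sin (0.5);

   fold to the REAL_CST 0.479425538... at compile time: the constant
   is converted to an MPFR number at the precision of TYPE, mpfr_sin
   (the FUNC argument) is applied, and do_mpfr_ckconv verifies the
   result converts back without overflow or underflow.  */
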
13542 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
13543 FUNC on it and return the resulting value as a tree with type TYPE.
13544 The mpfr precision is set to the precision of TYPE. We assume that
13545 function FUNC returns zero if the result could be calculated
13546 exactly within the requested precision. */
13547
13548 static tree
13549 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
13550 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13551 {
13552 tree result = NULL_TREE;
13553
13554 STRIP_NOPS (arg1);
13555 STRIP_NOPS (arg2);
13556
13557 /* To proceed, MPFR must exactly represent the target floating point
13558 format, which only happens when the target base equals two. */
13559 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13560 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13561 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13562 {
13563 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13564 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13565
13566 if (real_isfinite (ra1) && real_isfinite (ra2))
13567 {
13568 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13569 const int prec = fmt->p;
13570 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13571 int inexact;
13572 mpfr_t m1, m2;
13573
13574 mpfr_inits2 (prec, m1, m2, NULL);
13575 mpfr_from_real (m1, ra1, GMP_RNDN);
13576 mpfr_from_real (m2, ra2, GMP_RNDN);
13577 mpfr_clear_flags ();
13578 inexact = func (m1, m1, m2, rnd);
13579 result = do_mpfr_ckconv (m1, type, inexact);
13580 mpfr_clears (m1, m2, NULL);
13581 }
13582 }
13583
13584 return result;
13585 }
13586
13587 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
13588 FUNC on it and return the resulting value as a tree with type TYPE.
13589 The mpfr precision is set to the precision of TYPE. We assume that
13590 function FUNC returns zero if the result could be calculated
13591 exactly within the requested precision. */
13592
13593 static tree
13594 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
13595 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13596 {
13597 tree result = NULL_TREE;
13598
13599 STRIP_NOPS (arg1);
13600 STRIP_NOPS (arg2);
13601 STRIP_NOPS (arg3);
13602
13603 /* To proceed, MPFR must exactly represent the target floating point
13604 format, which only happens when the target base equals two. */
13605 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13606 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13607 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
13608 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
13609 {
13610 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13611 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13612 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
13613
13614 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
13615 {
13616 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13617 const int prec = fmt->p;
13618 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13619 int inexact;
13620 mpfr_t m1, m2, m3;
13621
13622 mpfr_inits2 (prec, m1, m2, m3, NULL);
13623 mpfr_from_real (m1, ra1, GMP_RNDN);
13624 mpfr_from_real (m2, ra2, GMP_RNDN);
13625 mpfr_from_real (m3, ra3, GMP_RNDN);
13626 mpfr_clear_flags ();
13627 inexact = func (m1, m1, m2, m3, rnd);
13628 result = do_mpfr_ckconv (m1, type, inexact);
13629 mpfr_clears (m1, m2, m3, NULL);
13630 }
13631 }
13632
13633 return result;
13634 }
13635
13636 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
13637 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
13638 If ARG_SINP and ARG_COSP are NULL then the result is returned
13639 as a complex value.
13640 The type is taken from the type of ARG and is used for setting the
13641 precision of the calculation and results. */
13642
13643 static tree
13644 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
13645 {
13646 tree const type = TREE_TYPE (arg);
13647 tree result = NULL_TREE;
13648
13649 STRIP_NOPS (arg);
13650
13651 /* To proceed, MPFR must exactly represent the target floating point
13652 format, which only happens when the target base equals two. */
13653 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13654 && TREE_CODE (arg) == REAL_CST
13655 && !TREE_OVERFLOW (arg))
13656 {
13657 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13658
13659 if (real_isfinite (ra))
13660 {
13661 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13662 const int prec = fmt->p;
13663 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13664 tree result_s, result_c;
13665 int inexact;
13666 mpfr_t m, ms, mc;
13667
13668 mpfr_inits2 (prec, m, ms, mc, NULL);
13669 mpfr_from_real (m, ra, GMP_RNDN);
13670 mpfr_clear_flags ();
13671 inexact = mpfr_sin_cos (ms, mc, m, rnd);
13672 result_s = do_mpfr_ckconv (ms, type, inexact);
13673 result_c = do_mpfr_ckconv (mc, type, inexact);
13674 mpfr_clears (m, ms, mc, NULL);
13675 if (result_s && result_c)
13676 {
13677 /* If we are to return in a complex value do so. */
13678 if (!arg_sinp && !arg_cosp)
13679 return build_complex (build_complex_type (type),
13680 result_c, result_s);
13681
13682 /* Dereference the sin/cos pointer arguments. */
13683 arg_sinp = build_fold_indirect_ref (arg_sinp);
13684 arg_cosp = build_fold_indirect_ref (arg_cosp);
13685 	 /* Proceed if valid pointer types were passed in. */
13686 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
13687 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
13688 {
13689 /* Set the values. */
13690 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
13691 result_s);
13692 TREE_SIDE_EFFECTS (result_s) = 1;
13693 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
13694 result_c);
13695 TREE_SIDE_EFFECTS (result_c) = 1;
13696 /* Combine the assignments into a compound expr. */
13697 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13698 result_s, result_c));
13699 }
13700 }
13701 }
13702 }
13703 return result;
13704 }
13705
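/* Editorial example (illustrative, not part of the original source):
   with a constant argument,

     sincos (0.5, &s, &c);

   folds to the two stores s = 0.479425...; c = 0.877582...; joined in
   a COMPOUND_EXPR, while a caller passing null sin/cos pointers gets
   the result back as the complex constant (cos, sin).  */
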
13706 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13707 two-argument mpfr order N Bessel function FUNC on them and return
13708 the resulting value as a tree with type TYPE. The mpfr precision
13709 is set to the precision of TYPE. We assume that function FUNC
13710 returns zero if the result could be calculated exactly within the
13711 requested precision. */
13712 static tree
13713 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
13714 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
13715 const REAL_VALUE_TYPE *min, bool inclusive)
13716 {
13717 tree result = NULL_TREE;
13718
13719 STRIP_NOPS (arg1);
13720 STRIP_NOPS (arg2);
13721
13722 /* To proceed, MPFR must exactly represent the target floating point
13723 format, which only happens when the target base equals two. */
13724 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13725 && tree_fits_shwi_p (arg1)
13726 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13727 {
13728 const HOST_WIDE_INT n = tree_to_shwi (arg1);
13729 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
13730
13731 if (n == (long)n
13732 && real_isfinite (ra)
13733 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
13734 {
13735 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13736 const int prec = fmt->p;
13737 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13738 int inexact;
13739 mpfr_t m;
13740
13741 mpfr_init2 (m, prec);
13742 mpfr_from_real (m, ra, GMP_RNDN);
13743 mpfr_clear_flags ();
13744 inexact = func (m, n, m, rnd);
13745 result = do_mpfr_ckconv (m, type, inexact);
13746 mpfr_clear (m);
13747 }
13748 }
13749
13750 return result;
13751 }
13752
/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the integer pointed to by ARG_QUO and return the remainder.  The
   type is taken from the type of ARG0 and is used for setting the
   precision of the calculation and results.  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          tree result_rem;
          long integer_quo;
          mpfr_t m0, m1;

          mpfr_inits2 (prec, m0, m1, NULL);
          mpfr_from_real (m0, ra0, GMP_RNDN);
          mpfr_from_real (m1, ra1, GMP_RNDN);
          mpfr_clear_flags ();
          mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
          /* Remquo is independent of the rounding mode, so pass
             inexact=0 to do_mpfr_ckconv().  */
          result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
          mpfr_clears (m0, m1, NULL);
          if (result_rem)
            {
              /* MPFR calculates quo in the host's long so it may
                 return more bits in quo than the target int can hold
                 if sizeof(host long) > sizeof(target int).  This can
                 happen even for native compilers in LP64 mode.  In
                 these cases, reduce the quo value modulo the largest
                 number that the target int can hold while leaving one
                 bit for the sign.  */
              if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
                integer_quo %= (long) (1UL << (INT_TYPE_SIZE - 1));

              /* Dereference the quo pointer argument.  */
              arg_quo = build_fold_indirect_ref (arg_quo);
              /* Proceed iff a valid pointer type was passed in.  */
              if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
                {
                  /* Set the value.  */
                  tree result_quo
                    = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
                                   build_int_cst (TREE_TYPE (arg_quo),
                                                  integer_quo));
                  TREE_SIDE_EFFECTS (result_quo) = 1;
                  /* Combine the quo assignment with the rem.  */
                  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                    result_quo, result_rem));
                }
            }
        }
    }
  return result;
}

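/* Illustration only (not part of the original source): a standalone
   sketch of the mpfr_remquo call folded above, matching C99
   remquo (7.0, 2.0, &q): 7/2 = 3.5 rounds to the even quotient 4, so
   the remainder is -1 and the low bits of the quotient are 4.  The
   53-bit precision is an assumption for the demo.  */
#if 0
#include <stdio.h>
#include <mpfr.h>

int
main (void)
{
  mpfr_t m0, m1;
  long q;

  mpfr_inits2 (53, m0, m1, (mpfr_ptr) 0);
  mpfr_set_d (m0, 7.0, GMP_RNDN);
  mpfr_set_d (m1, 2.0, GMP_RNDN);
  mpfr_remquo (m0, &q, m0, m1, GMP_RNDN);       /* m0 = -1, q = 4  */
  mpfr_printf ("rem = %Rg, quo = %ld\n", m0, q);
  mpfr_clears (m0, m1, (mpfr_ptr) 0);
  return 0;
}
#endif
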
/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
         negative integer.  */
      if (real_isfinite (ra)
          && ra->cl != rvc_zero
          && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          int inexact, sg;
          mpfr_t m;
          tree result_lg;

          mpfr_init2 (m, prec);
          mpfr_from_real (m, ra, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = mpfr_lgamma (m, &sg, m, rnd);
          result_lg = do_mpfr_ckconv (m, type, inexact);
          mpfr_clear (m);
          if (result_lg)
            {
              tree result_sg;

              /* Dereference the arg_sg pointer argument.  */
              arg_sg = build_fold_indirect_ref (arg_sg);
              /* Assign the signgam value into *arg_sg.  */
              result_sg = fold_build2 (MODIFY_EXPR,
                                       TREE_TYPE (arg_sg), arg_sg,
                                       build_int_cst (TREE_TYPE (arg_sg), sg));
              TREE_SIDE_EFFECTS (result_sg) = 1;
              /* Combine the signgam assignment with the lgamma result.  */
              result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                result_sg, result_lg));
            }
        }
    }

  return result;
}

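/* Illustration only (not part of the original source): a standalone
   sketch of the mpfr_lgamma call folded above.  For x = -2.5,
   gamma(x) is negative, so the sign output is -1 while the returned
   value is log(|gamma(x)|).  The 53-bit precision is an assumed
   stand-in for the precision of TYPE.  */
#if 0
#include <stdio.h>
#include <mpfr.h>

int
main (void)
{
  mpfr_t m;
  int sg;

  mpfr_init2 (m, 53);
  mpfr_set_d (m, -2.5, GMP_RNDN);
  mpfr_lgamma (m, &sg, m, GMP_RNDN);    /* sg gets the sign of gamma(x)  */
  mpfr_printf ("lgamma(-2.5) = %.17Rg, signgam = %d\n", m, sg);
  mpfr_clear (m);
  return 0;
}
#endif
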
/* If argument ARG is a COMPLEX_CST, call the one-argument mpc
   function FUNC on it and return the resulting value as a tree with
   type TYPE.  The mpfr precision is set to the precision of TYPE.  We
   assume that function FUNC returns zero if the result could be
   calculated exactly within the requested precision.  */

static tree
do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
      const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));

      if (real_isfinite (re) && real_isfinite (im))
        {
          const struct real_format *const fmt =
            REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
          int inexact;
          mpc_t m;

          mpc_init2 (m, prec);
          mpfr_from_real (mpc_realref (m), re, rnd);
          mpfr_from_real (mpc_imagref (m), im, rnd);
          mpfr_clear_flags ();
          inexact = func (m, m, crnd);
          result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
          mpc_clear (m);
        }
    }

  return result;
}

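/* Illustration only (not part of the original source): a standalone
   sketch of the MPC pattern used by do_mpc_arg1 above, with mpc_sqrt
   standing in for the one-argument FUNC.  csqrt (-3 + 4i) is exactly
   1 + 2i, so the ternary value reports an exact result.  The 53-bit
   precision and the operand are assumptions for the demo.  */
#if 0
#include <stdio.h>
#include <mpc.h>

int
main (void)
{
  mpc_t m;
  int inexact;

  mpc_init2 (m, 53);
  mpc_set_d_d (m, -3.0, 4.0, MPC_RNDNN);
  inexact = mpc_sqrt (m, m, MPC_RNDNN);         /* m = 1 + 2i  */
  mpc_out_str (stdout, 10, 0, m, MPC_RNDNN);
  printf (" (%s)\n", inexact ? "inexact" : "exact");
  mpc_clear (m);
  return 0;
}
#endif
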
/* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
   mpc function FUNC on them and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
             int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
          || (real_isfinite (re0) && real_isfinite (im0)
              && real_isfinite (re1) && real_isfinite (im1)))
        {
          const struct real_format *const fmt =
            REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
          int inexact;
          mpc_t m0, m1;

          mpc_init2 (m0, prec);
          mpc_init2 (m1, prec);
          mpfr_from_real (mpc_realref (m0), re0, rnd);
          mpfr_from_real (mpc_imagref (m0), im0, rnd);
          mpfr_from_real (mpc_realref (m1), re1, rnd);
          mpfr_from_real (mpc_imagref (m1), im1, rnd);
          mpfr_clear_flags ();
          inexact = func (m0, m0, m1, crnd);
          result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
          mpc_clear (m0);
          mpc_clear (m1);
        }
    }

  return result;
}

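/* Illustration only (not part of the original source): a standalone
   sketch of the two-argument MPC pattern above, with mpc_pow standing
   in for FUNC.  cpow (1 + 1i, 2 + 0i) is exactly 0 + 2i.  The 53-bit
   precision and the operands are assumptions for the demo.  */
#if 0
#include <stdio.h>
#include <mpc.h>

int
main (void)
{
  mpc_t m0, m1;
  int inexact;

  mpc_init2 (m0, 53);
  mpc_init2 (m1, 53);
  mpc_set_d_d (m0, 1.0, 1.0, MPC_RNDNN);
  mpc_set_d_d (m1, 2.0, 0.0, MPC_RNDNN);
  inexact = mpc_pow (m0, m0, m1, MPC_RNDNN);    /* m0 = 0 + 2i  */
  mpc_out_str (stdout, 10, 0, m0, MPC_RNDNN);
  printf (" (%s)\n", inexact ? "inexact" : "exact");
  mpc_clear (m0);
  mpc_clear (m1);
  return 0;
}
#endif
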
/* Fold a call STMT to __{,v}sprintf_chk.  Return NULL_TREE if
   a normal call should be emitted rather than expanding the function
   inline.  FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK.  */

static tree
gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
{
  int nargs = gimple_call_num_args (stmt);

  return fold_builtin_sprintf_chk_1 (gimple_location (stmt), nargs,
                                     (nargs > 0
                                      ? gimple_call_arg_ptr (stmt, 0)
                                      : &error_mark_node), fcode);
}

/* Fold a call STMT to __{,v}snprintf_chk.  Return NULL_TREE if
   a normal call should be emitted rather than expanding the function
   inline.  FCODE is either BUILT_IN_SNPRINTF_CHK or
   BUILT_IN_VSNPRINTF_CHK.  If MAXLEN is not NULL, it is the maximum
   length passed as the second argument.  */

tree
gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
                                  enum built_in_function fcode)
{
  int nargs = gimple_call_num_args (stmt);

  return fold_builtin_snprintf_chk_1 (gimple_location (stmt), nargs,
                                      (nargs > 0
                                       ? gimple_call_arg_ptr (stmt, 0)
                                       : &error_mark_node), maxlen, fcode);
}

/* Builtins with folding operations that operate on "..." arguments
   need special handling; we need to store the arguments in a convenient
   data structure before attempting any folding.  Fortunately there are
   only a few builtins that fall into this category.  FNDECL is the
   function, STMT is the GIMPLE call statement for it, and IGNORE is
   true if the result of the function call is ignored.  */

static tree
gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
                             bool ignore ATTRIBUTE_UNUSED)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  tree ret = NULL_TREE;

  switch (fcode)
    {
    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
      break;

    default:
      break;
    }
  if (ret)
    {
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      TREE_NO_WARNING (ret) = 1;
      return ret;
    }
  return NULL_TREE;
}

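/* Illustration only (not part of the original source): the kind of
   user-level call the _chk folders above see.  Fortified headers map
   sprintf to __builtin___sprintf_chk; when the format is a plain
   string with no '%' and the destination size is known, the call can
   be folded to a simple copy.  This is a sketch of the source form,
   not of the folded GIMPLE.  */
#if 0
char buf[16];

void
demo (void)
{
  /* Roughly what a fortify wrapper expands sprintf (buf, "hello")
     into; the fold can turn this into memcpy (buf, "hello", 6).  */
  __builtin___sprintf_chk (buf, /*flag=*/ 1,
                           __builtin_object_size (buf, 1), "hello");
}
#endif
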
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node before the warning is generated.  */

tree
fold_call_stmt (gimple stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
                    ? gimple_call_arg_ptr (stmt, 0)
                    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
        return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
        {
          return targetm.fold_builtin (fndecl, nargs, args, ignore);
        }
      else
        {
          if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
            ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
          if (!ret)
            ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
          if (ret)
            {
              /* Propagate location information from original call to
                 expansion of builtin.  Otherwise things like
                 maybe_emit_chk_warning, that operate on the expansion
                 of a builtin, will use the wrong location information.  */
              if (gimple_has_location (stmt))
                {
                  tree realret = ret;
                  if (TREE_CODE (ret) == NOP_EXPR)
                    realret = TREE_OPERAND (ret, 0);
                  if (CAN_HAVE_LOCATION_P (realret)
                      && !EXPR_HAS_LOCATION (realret))
                    SET_EXPR_LOCATION (realret, loc);
                  return realret;
                }
              return ret;
            }
        }
    }
  return NULL_TREE;
}

/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  tree builtin;
  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
              && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
              && asmspec != 0);

  builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);
  switch (DECL_FUNCTION_CODE (decl))
    {
    case BUILT_IN_MEMCPY:
      init_block_move_fn (asmspec);
      memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
      break;
    case BUILT_IN_MEMSET:
      init_block_clear_fn (asmspec);
      memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
      break;
    case BUILT_IN_MEMMOVE:
      memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
      break;
    case BUILT_IN_MEMCMP:
      memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
      break;
    case BUILT_IN_ABORT:
      abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
      break;
    case BUILT_IN_FFS:
      if (INT_TYPE_SIZE < BITS_PER_WORD)
        {
          set_user_assembler_libfunc ("ffs", asmspec);
          set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
                                                       MODE_INT, 0), "ffs");
        }
      break;
    default:
      break;
    }
}

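/* Illustration only (not part of the original source): the user-level
   construct that reaches set_builtin_user_assembler_name.  Renaming a
   recognized builtin with an asm label makes the compiler emit calls
   -- including ones it synthesizes itself for block moves -- to that
   symbol instead.  "my_memcpy" is a hypothetical name.  */
#if 0
typedef __SIZE_TYPE__ size_t;

/* Redirect every memcpy call, explicit or compiler-generated, to the
   assembler symbol my_memcpy.  */
extern void *memcpy (void *dest, const void *src, size_t n)
  __asm__ ("my_memcpy");

void
copy (char *dst, const char *src, size_t n)
{
  memcpy (dst, src, n);         /* emitted as a call to my_memcpy  */
}
#endif
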
/* Return true if DECL is a builtin that expands to a constant or similarly
   simple code.  */
bool
is_simple_builtin (tree decl)
{
  if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
        /* Builtins that expand to constants.  */
      case BUILT_IN_CONSTANT_P:
      case BUILT_IN_EXPECT:
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_UNREACHABLE:
        /* Simple register moves or loads from stack.  */
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_RETURN_ADDRESS:
      case BUILT_IN_EXTRACT_RETURN_ADDR:
      case BUILT_IN_FROB_RETURN_ADDR:
      case BUILT_IN_RETURN:
      case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_VA_END:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
        /* Exception state returns or moves registers around.  */
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_COPY_VALUES:
        return true;

      default:
        return false;
      }

  return false;
}

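/* Illustration only (not part of the original source): two of the
   "expands to a constant" builtins classified as simple above, as
   they appear in user code.  Neither generates a call; here
   __builtin_constant_p folds to 0 (x is not a compile-time constant)
   and __builtin_expect folds to its first argument, acting purely as
   a branch-prediction hint.  */
#if 0
int
demo (int x)
{
  if (__builtin_constant_p (x))
    return 42;
  if (__builtin_expect (x > 0, 1))
    return x;
  return -x;
}
#endif
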
/* Return true if DECL is a builtin that is not expensive, i.e., one
   that is most probably expanded inline into reasonably simple code.
   This is a superset of is_simple_builtin.  */
bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      case BUILT_IN_ABS:
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
        return true;

      default:
        return is_simple_builtin (decl);
      }

  return false;
}