/* Expand builtin functions.
   Copyright (C) 1988-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "machmode.h"
#include "rtl.h"
#include "tree.h"
#include "stringpool.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "tree-object-size.h"
#include "realmpfr.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"
#include "predict.h"
#include "tm_p.h"
#include "target.h"
#include "langhooks.h"
#include "tree-ssanames.h"
#include "tree-dfa.h"
#include "value-prof.h"
#include "diagnostic-core.h"
#include "builtins.h"
#include "ubsan.h"
#include "cilk.h"


static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));

struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN

/* Set up an array of _DECL trees; make sure each element is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info;

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;

static const char *c_getstr (tree);
static rtx c_readstr (const char *, enum machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
static rtx result_vector (int, rtx);
#endif
static void expand_builtin_update_setjmp_buf (rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static void expand_errno_check (tree, rtx);
static rtx expand_builtin_mathfn (tree, rtx, rtx);
static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
                                        enum machine_mode, int);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx,
                                       enum machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
static rtx expand_builtin_alloca (tree, bool);
static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_expect (location_t, tree, tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree fold_builtin_nan (tree, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static bool integer_valued_real_p (tree);
static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
static bool readonly_data_expr (tree);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_sqrt (location_t, tree, tree);
static tree fold_builtin_cbrt (location_t, tree, tree);
static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
static tree fold_builtin_cos (location_t, tree, tree, tree);
static tree fold_builtin_cosh (location_t, tree, tree, tree);
static tree fold_builtin_tan (tree, tree);
static tree fold_builtin_trunc (location_t, tree, tree);
static tree fold_builtin_floor (location_t, tree, tree);
static tree fold_builtin_ceil (location_t, tree, tree);
static tree fold_builtin_round (location_t, tree, tree);
static tree fold_builtin_int_roundingfn (location_t, tree, tree);
static tree fold_builtin_bitop (tree, tree);
static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree,
                                    bool, int);
static tree fold_builtin_strchr (location_t, tree, tree, tree);
static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_strcmp (location_t, tree, tree);
static tree fold_builtin_strncmp (location_t, tree, tree, tree);
static tree fold_builtin_signbit (location_t, tree, tree);
static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree,
                                        enum tree_code, enum tree_code);
static tree fold_builtin_n (location_t, tree, tree *, int, bool);
static tree fold_builtin_0 (location_t, tree, bool);
static tree fold_builtin_1 (location_t, tree, tree, bool);
static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
static tree fold_builtin_varargs (location_t, tree, tree, bool);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strstr (location_t, tree, tree, tree);
static tree fold_builtin_strrchr (location_t, tree, tree, tree);
static tree fold_builtin_strcat (location_t, tree, tree);
static tree fold_builtin_strncat (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);
static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
static tree fold_builtin_snprintf (location_t, tree, tree, tree, tree, int);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
                                      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);
static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree,
                                      tree);
static tree fold_builtin_sprintf_chk (location_t, tree,
                                      enum built_in_function);
static tree fold_builtin_printf (location_t, tree, tree, tree, bool,
                                 enum built_in_function);
static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
                                  enum built_in_function);
static bool init_target_chars (void);

static unsigned HOST_WIDE_INT target_newline;
static unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
static char target_percent_c[3];
static char target_percent_s[3];
static char target_percent_s_newline[4];
static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
                          const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *,
                          bool);
static tree do_mpfr_arg2 (tree, tree, tree,
                          int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr,
                                  mp_rnd_t));
static tree do_mpfr_arg3 (tree, tree, tree, tree,
                          int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr,
                                  mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_sincos (tree, tree, tree);
static tree do_mpfr_bessel_n (tree, tree, tree,
                              int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
                              const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);

/* Return true if NAME starts with __builtin_, __sync_ or __atomic_,
   or names one of the Cilk Plus runtime entry points.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  if (flag_enable_cilkplus
      && (!strcmp (name, "__cilkrts_detach")
          || !strcmp (name, "__cilkrts_pop_frame")))
    return true;
  return false;
}
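
/* Illustrative examples (names are hypothetical, not from this file):
   is_builtin_name returns true for "__builtin_memcpy",
   "__sync_fetch_and_add" and "__atomic_load_n", and false for an
   ordinary user symbol such as "my_memcpy".  */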

/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}

/* By default we assume that C99 functions are present at run time,
   but sincos is not.  */
bool
default_libc_has_function (enum function_class fn_class)
{
  if (fn_class == function_c94
      || fn_class == function_c99_misc
      || fn_class == function_c99_math_complex)
    return true;

  return false;
}

bool
gnu_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED)
{
  return true;
}

bool
no_c99_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED)
{
  return false;
}

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

static bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}

/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and
   N in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
                        unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  enum machine_mode mode;
  int unsignedp, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                             &mode, &unsignedp, &volatilep, true);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
         alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
         allows the low bit to be used as a virtual bit, we know
         that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
        align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
        align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
    {
      align = TYPE_ALIGN (TREE_TYPE (exp));
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
           || TREE_CODE (exp) == MEM_REF
           || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;

      if (TREE_CODE (addr) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
        {
          align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
                   & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
          align *= BITS_PER_UNIT;
          addr = TREE_OPERAND (addr, 0);
        }

      known_alignment
        = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* The alignment of the pointer operand in a TARGET_MEM_REF
         has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
        {
          if (TMR_INDEX (exp))
            {
              unsigned HOST_WIDE_INT step = 1;
              if (TMR_STEP (exp))
                step = TREE_INT_CST_LOW (TMR_STEP (exp));
              align = MIN (align, (step & -step) * BITS_PER_UNIT);
            }
          if (TMR_INDEX2 (exp))
            align = BITS_PER_UNIT;
          known_alignment = false;
        }

      /* When EXP is an actual memory reference then we can use
         TYPE_ALIGN of a pointer indirection to derive alignment.
         Do so only if get_pointer_alignment_1 did not reveal absolute
         alignment knowledge and if using that alignment would
         improve the situation.  */
      if (!addr_p && !known_alignment
          && TYPE_ALIGN (TREE_TYPE (exp)) > align)
        align = TYPE_ALIGN (TREE_TYPE (exp));
      else
        {
          /* Else adjust bitpos accordingly.  */
          bitpos += ptr_bitpos;
          if (TREE_CODE (exp) == MEM_REF
              || TREE_CODE (exp) == TARGET_MEM_REF)
            bitpos += mem_ref_offset (exp).low * BITS_PER_UNIT;
        }
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CSTs are the only constant objects we allow to be not
         wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
        align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
        {
          unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
          if (inner)
            align = MIN (align, inner);
        }
    }

  *alignp = align;
  *bitposp = bitpos & (*alignp - 1);
  return known_alignment;
}

/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Otherwise return false and store BITS_PER_UNIT to *ALIGNP and any
   bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
                        unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}

/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);
  return align;
}
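
/* Worked example (illustrative): if get_object_alignment_1 reports
   align == 16 * BITS_PER_UNIT with bitpos == 4 * BITS_PER_UNIT, the
   address is known to be 4 mod 16 bytes, so the largest power of two
   guaranteed to divide it is 4 bytes; get_object_alignment therefore
   returns bitpos & -bitpos == 32 bits.  */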

/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Return false if the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
                         unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
                                   alignp, bitposp, true);
  else if (TREE_CODE (exp) == SSA_NAME
           && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
        {
          *bitposp = ptr_misalign * BITS_PER_UNIT;
          *alignp = ptr_align * BITS_PER_UNIT;
          /* We cannot really tell whether this result is an approximation.  */
          return true;
        }
      else
        {
          *bitposp = 0;
          *alignp = BITS_PER_UNIT;
          return false;
        }
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
                  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}
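
/* Illustrative example: for a constant pointer such as (char *) 0x1004
   on a target with BIGGEST_ALIGNMENT == 128 bits, the INTEGER_CST case
   above yields *alignp == 128 and *bitposp == (0x1004 * 8) & 127 == 32,
   i.e. the address is known to be 4 bytes past a 16-byte boundary.  */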

/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);

  return align;
}

/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
        return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  loc = EXPR_LOC_OR_LOC (src, input_location);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
         compute the offset to the following null if we don't know where to
         start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
        if (ptr[i] == 0)
          return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
         has no internal zero bytes.  We can assume that the offset falls
         within the bounds of the string; otherwise, the programmer deserves
         what he gets.  Subtract the offset from the length of the string,
         and return that.  This would perhaps not be valid if we were dealing
         with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! tree_fits_shwi_p (offset_node))
    offset = -1;
  else
    offset = tree_to_shwi (offset_node);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (! TREE_NO_WARNING (src))
        {
          warning_at (loc, 0, "offset outside bounds of constant string");
          TREE_NO_WARNING (src) = 1;
        }
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
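
/* Illustrative examples: for "foobar" + 2, c_strlen returns 4; for
   "foo\0bar" + 5 it returns 2 (the length of "ar"); and for "foo\0bar"
   with a non-constant offset it returns NULL_TREE, because the internal
   zero byte makes the length depend on where the offset lands.  */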

/* Return a char pointer for a C string if it is a string constant
   or sum of string constant and integer constant.  */

static const char *
c_getstr (tree src)
{
  tree offset_node;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;

  if (offset_node == 0)
    return TREE_STRING_POINTER (src);
  else if (!tree_fits_uhwi_p (offset_node)
           || compare_tree_int (offset_node,
                                TREE_STRING_LENGTH (src) - 1) > 0)
    return 0;

  return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
}

/* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, enum machine_mode mode)
{
  HOST_WIDE_INT c[2];
  HOST_WIDE_INT ch;
  unsigned int i, j;

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);

  c[0] = 0;
  c[1] = 0;
  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
        j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
          && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
        j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;
      gcc_assert (j < HOST_BITS_PER_DOUBLE_INT);

      if (ch)
        ch = (unsigned char) str[i];
      c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }
  return immed_double_const (c[0], c[1], mode);
}
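
/* Illustrative example: on a little-endian target, reading "abcd" in a
   32-bit integer mode places 'a' in the least significant byte, giving
   the CONST_INT 0x64636261; on a big-endian target the same call yields
   0x61626364.  Once the terminating NUL has been seen, CH stays 0 and
   the remaining bytes are zero-filled.  */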

/* Cast a target constant CST to target CHAR and, if that value fits into
   the host char type, return zero and store the value in the variable
   pointed to by P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  val = TREE_INT_CST_LOW (cst);
  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}

/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
          && (TREE_CODE (exp) == PARM_DECL
              || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}

/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;

#ifdef INITIAL_FRAME_ADDRESS_RTX
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
#else
  rtx tem;

  /* For a zero count with __builtin_return_address, we don't care what
     frame address we return, because target-specific definitions will
     override us.  Therefore frame pointer elimination is OK, and using
     the soft frame pointer is OK.

     For a nonzero count, or a zero count with __builtin_frame_address,
     we require a stable offset from the current frame pointer to the
     previous one, so we must use the hard frame pointer, and
     we must disable frame pointer elimination.  */
  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    tem = frame_pointer_rtx;
  else
    {
      tem = hard_frame_pointer_rtx;

      /* Tell reload not to eliminate the frame pointer.  */
      crtl->accesses_prior_frames = 1;
    }
#endif

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the SPARC, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
         frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
#ifdef FRAME_ADDR_RTX
    return FRAME_ADDR_RTX (tem);
#else
    return tem;
#endif

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
                        plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
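
/* Illustrative source-level usage: __builtin_return_address (0) yields
   the address the current function will return to, and
   __builtin_frame_address (1) the frame of its caller, e.g.

     void *ra = __builtin_return_address (0);
     void *fp = __builtin_frame_address (1);

   Nonzero arguments walk the dynamic chain exactly as the loop above
   does, and are only reliable when frame pointers are preserved.  */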

/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
                                           GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
                  force_reg (Pmode,
                             gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
                            plus_constant (Pmode, buf_addr,
                                           2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
#ifdef HAVE_builtin_setjmp_setup
  if (HAVE_builtin_setjmp_setup)
    emit_insn (gen_builtin_setjmp_setup (buf_addr));
#endif

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}
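
/* The resulting buffer layout, as established above (one word is
   GET_MODE_SIZE (Pmode) bytes): word 0 holds the frame pointer value,
   word 1 the address of the receiver label, and words 2 and up the
   nonlocal stack save area.  expand_builtin_longjmp below reads the
   words back at the same offsets.  */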

/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    /* First adjust our frame pointer to its actual value.  It was
       previously set to the start of the virtual area corresponding to
       the stacked variables when we branched here and now needs to be
       adjusted to the actual hardware fp value.

       Assignments to virtual registers are converted by
       instantiate_virtual_regs into the corresponding assignment
       to the underlying register (fp in this case) that makes
       the original assignment true.
       So the following insn will actually be decrementing fp by
       STARTING_FRAME_OFFSET.  */
    emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

#if !HARD_FRAME_POINTER_IS_ARG_POINTER
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
         frame pointer, we don't need to restore it.  We assume here
         that if such an elimination is present, it can always be used.
         This is the case on all known machines; if we don't make this
         assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[]
        = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
        if (elim_regs[i].from == ARG_POINTER_REGNUM
            && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
          break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
        {
          /* Now restore our arg pointer from the address at which it
             was saved in our stack frame.  */
          emit_move_insn (crtl->args.internal_arg_pointer,
                          copy_to_reg (get_arg_pointer_save_area ()));
        }
    }
#endif

#ifdef HAVE_builtin_setjmp_receiver
  if (receiver_label != NULL && HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (receiver_label));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
    if (HAVE_nonlocal_goto_receiver)
      emit_insn (gen_nonlocal_goto_receiver ());
    else
#endif
      { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  Similarly, we must block
     (frame-related) register values to be used across this code.  */
  emit_insn (gen_blockage ());
}

/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack, insn, last;
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded in the
     current function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* The user must pass a second argument of 1, because that is what
     builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    emit_insn (gen_builtin_longjmp (buf_addr));
  else
#endif
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
                                               GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
                                                   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
         from expand_goto in stmt.c; see there for detailed comments.  */
#ifdef HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
        /* We have to pass a value to the nonlocal_goto pattern that will
           get copied into the static_chain pointer, but it does not matter
           what that value is, because builtin_setjmp does not use it.  */
        emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
      else
#endif
        {
          lab = copy_to_reg (lab);

          emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
          emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

          emit_move_insn (hard_frame_pointer_rtx, fp);
          emit_stack_restore (SAVE_NONLOCAL, stack);

          emit_use (hard_frame_pointer_rtx);
          emit_use (stack_pointer_rtx);
          emit_indirect_jump (lab);
        }
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
        {
          add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
          break;
        }
      else if (CALL_P (insn))
        break;
    }
}
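
/* Illustrative source-level usage (buffer of five words as the comment
   above requires; names are hypothetical):

     void *buf[5];
     if (__builtin_setjmp (buf) == 0)
       do_work ();                 // direct return path
     else
       handle_unwind ();           // reached via __builtin_longjmp
     ...
     __builtin_longjmp (buf, 1);   // second argument must be 1

   Unlike the C library setjmp/longjmp, these are intended only for
   internal exception-handling use, as cautioned above.  */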

/* Return true if the const call expression argument iterator ITER has
   more arguments to visit.  */

static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis; otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = false;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  do
    {
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
        {
        case 0:
          /* This signifies an ellipsis; any further arguments are all ok.  */
          res = true;
          goto end;
        case VOID_TYPE:
          /* This signifies an endlink; if no arguments remain, return
             true, otherwise return false.  */
          res = !more_const_call_expr_args_p (&iter);
          goto end;
        default:
          /* If no parameters remain or the parameter's code does not
             match the specified code, return false.  Otherwise continue
             checking any remaining arguments.  */
          arg = next_const_call_expr_arg (&iter);
          if (!validate_arg (arg, code))
            goto end;
          break;
        }
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  return res;
}
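
/* Example calls, as used throughout this file: the memcpy expander
   checks its three arguments with

     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE,
                       VOID_TYPE);

   while expand_builtin_prefetch below accepts trailing optional
   arguments via the 0 (ellipsis) specifier:

     validate_arglist (exp, POINTER_TYPE, 0);  */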

/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp, insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
                      plus_constant (Pmode, r_save_area,
                                     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

#ifdef HAVE_nonlocal_goto
  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (HAVE_nonlocal_goto)
    emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
#endif
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
         not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
         conservatively assume that the target function makes use of it.
         The prologue of functions with nonlocal gotos must therefore
         initialize the GP register to the appropriate value, and we
         must then make sure that this value is live at the point
         of the jump.  (Note that this doesn't necessarily apply
         to targets with a nonlocal_goto pattern; they are free
         to implement it in their own way.  Note also that this is
         a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
          && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
        emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
        {
          add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
          break;
        }
      else if (CALL_P (insn))
        break;
    }

  return const0_rtx;
}

/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to correspond to the current
   stack pointer.  */

static void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
                   memory_address
                   (sa_mode,
                    plus_constant (Pmode, buf_addr,
                                   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}

/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
               " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

#ifdef HAVE_prefetch
  if (HAVE_prefetch)
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (CODE_FOR_prefetch, 3, ops))
        return;
    }
#endif

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
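
/* Illustrative usage: prefetch P for reading with maximal temporal
   locality, and Q for writing with no expected reuse:

     __builtin_prefetch (p);        // same as (p, 0, 3)
     __builtin_prefetch (q, 1, 0);

   On targets without a prefetch pattern these expand to nothing, except
   that a side-effecting address expression is still evaluated.  */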

/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not a resolved SAVE_EXPR, MEM_ATTRS can still be derived
     from its expression; for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
         && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
                     build_array_type (char_type_node,
                                       build_range_type (sizetype,
                                                         size_one_node, len)),
                     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
           && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
                                                     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
                         build_array_type (char_type_node,
                                           build_range_type (sizetype,
                                                             size_zero_node,
                                                             NULL)),
                         exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}
\f
/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
         passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
        size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if (FUNCTION_ARG_REGNO_P (regno))
          {
            mode = targetm.calls.get_raw_arg_mode (regno);

            gcc_assert (mode != VOIDmode);

            align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
            if (size % align != 0)
              size = CEIL (size, align) * align;
            size += GET_MODE_SIZE (mode);
            apply_args_mode[regno] = mode;
          }
        else
          {
            apply_args_mode[regno] = VOIDmode;
          }
    }
  return size;
}

/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if (targetm.calls.function_value_regno_p (regno))
          {
            mode = targetm.calls.get_raw_result_mode (regno);

            gcc_assert (mode != VOIDmode);

            align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
            if (size % align != 0)
              size = CEIL (size, align) * align;
            size += GET_MODE_SIZE (mode);
            apply_result_mode[regno] = mode;
          }
        else
          apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
         the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}

#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  enum machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
        mem = adjust_address (result, mode, size);
        savevec[nelts++] = (savep
                            ? gen_rtx_SET (VOIDmode, mem, reg)
                            : gen_rtx_SET (VOIDmode, reg, mem));
        size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */

/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  enum machine_mode mode;
  rtx struct_incoming_value
    = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;

        tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

        emit_move_insn (adjust_address (registers, mode, size), tem);
        size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
#ifdef STACK_GROWS_DOWNWARD
  /* We need the pointer as the caller actually passed it to us, not
     as we might have pretended it was passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  tem
    = force_operand (plus_constant (Pmode, tem, crtl->args.pretend_args_size),
                     NULL_RTX);
#endif
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
                      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}

/* __builtin_apply_args returns a block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;
    rtx seq;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
        && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
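
/* Illustrative usage, e.g. in a simple call forwarder (names and the
   size argument are hypothetical): __builtin_apply passes the saved
   incoming arguments on to another function with the same signature
   and returns a pointer to its result block:

     void *args = __builtin_apply_args ();
     void *res = __builtin_apply ((void (*) ()) target_fn, args, 64);
     __builtin_return (res);

   The 64 here is a caller-chosen upper bound on the argument block
   size; it is not computed by this file.  */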

/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  enum machine_mode mode;
  rtx incoming_args, result, reg, dest, src, call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value
    = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
#ifndef STACK_GROWS_DOWNWARD
  incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
                                       incoming_args, 0, OPTAB_LIB_WIDEN);
#endif

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
  else
#endif
    emit_stack_save (SAVE_BLOCK, &old_stack_level);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  We can pass TRUE
     as the 4th argument because we just saved the stack pointer
     and will restore it right after the call.  */
  allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
#ifndef STACK_GROWS_DOWNWARD
  if (CONST_INT_P (argsize))
    dest = plus_constant (Pmode, dest, -INTVAL (argsize));
  else
    dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
#endif
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        reg = gen_rtx_REG (mode, regno);
        emit_move_insn (reg, adjust_address (arguments, mode, size));
        use_reg (&call_fusage, reg);
        size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
        use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
#ifdef HAVE_untyped_call
  if (HAVE_untyped_call)
    emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
                                      result, result_vector (1, result)));
1699 else
1700 #endif
1701 #ifdef HAVE_call_value
1702 if (HAVE_call_value)
1703 {
1704 rtx valreg = 0;
1705
1706 /* Locate the unique return register. It is not possible to
1707 express a call that sets more than one return register using
1708 call_value; use untyped_call for that. In fact, untyped_call
1709 only needs to save the return registers in the given block. */
1710 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1711 if ((mode = apply_result_mode[regno]) != VOIDmode)
1712 {
1713 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1714
1715 valreg = gen_rtx_REG (mode, regno);
1716 }
1717
1718 emit_call_insn (GEN_CALL_VALUE (valreg,
1719 gen_rtx_MEM (FUNCTION_MODE, function),
1720 const0_rtx, NULL_RTX, const0_rtx));
1721
1722 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1723 }
1724 else
1725 #endif
1726 gcc_unreachable ();
1727
1728 /* Find the CALL insn we just emitted, and attach the register usage
1729 information. */
1730 call_insn = last_call_insn ();
1731 add_function_usage_to (call_insn, call_fusage);
1732
1733 /* Restore the stack. */
1734 #ifdef HAVE_save_stack_nonlocal
1735 if (HAVE_save_stack_nonlocal)
1736 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1737 else
1738 #endif
1739 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1740 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1741
1742 OK_DEFER_POP;
1743
1744 /* Return the address of the result block. */
1745 result = copy_addr_to_reg (XEXP (result, 0));
1746 return convert_memory_address (ptr_mode, result);
1747 }
1748
1749 /* Perform an untyped return. */
1750
1751 static void
1752 expand_builtin_return (rtx result)
1753 {
1754 int size, align, regno;
1755 enum machine_mode mode;
1756 rtx reg;
1757 rtx call_fusage = 0;
1758
1759 result = convert_memory_address (Pmode, result);
1760
1761 apply_result_size ();
1762 result = gen_rtx_MEM (BLKmode, result);
1763
1764 #ifdef HAVE_untyped_return
1765 if (HAVE_untyped_return)
1766 {
1767 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1768 emit_barrier ();
1769 return;
1770 }
1771 #endif
1772
1773 /* Restore the return value and note that each value is used. */
1774 size = 0;
1775 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1776 if ((mode = apply_result_mode[regno]) != VOIDmode)
1777 {
1778 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1779 if (size % align != 0)
1780 size = CEIL (size, align) * align;
1781 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1782 emit_move_insn (reg, adjust_address (result, mode, size));
1783
1784 push_to_sequence (call_fusage);
1785 emit_use (reg);
1786 call_fusage = get_insns ();
1787 end_sequence ();
1788 size += GET_MODE_SIZE (mode);
1789 }
1790
1791 /* Put the USE insns before the return. */
1792 emit_insn (call_fusage);
1793
1794 /* Return whatever values were restored by jumping directly to the end
1795 of the function. */
1796 expand_naked_return ();
1797 }
1798
1799 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1800
1801 static enum type_class
1802 type_to_class (tree type)
1803 {
1804 switch (TREE_CODE (type))
1805 {
1806 case VOID_TYPE: return void_type_class;
1807 case INTEGER_TYPE: return integer_type_class;
1808 case ENUMERAL_TYPE: return enumeral_type_class;
1809 case BOOLEAN_TYPE: return boolean_type_class;
1810 case POINTER_TYPE: return pointer_type_class;
1811 case REFERENCE_TYPE: return reference_type_class;
1812 case OFFSET_TYPE: return offset_type_class;
1813 case REAL_TYPE: return real_type_class;
1814 case COMPLEX_TYPE: return complex_type_class;
1815 case FUNCTION_TYPE: return function_type_class;
1816 case METHOD_TYPE: return method_type_class;
1817 case RECORD_TYPE: return record_type_class;
1818 case UNION_TYPE:
1819 case QUAL_UNION_TYPE: return union_type_class;
1820 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1821 ? string_type_class : array_type_class);
1822 case LANG_TYPE: return lang_type_class;
1823 default: return no_type_class;
1824 }
1825 }
1826
1827 /* Expand a call EXP to __builtin_classify_type. */
1828
1829 static rtx
1830 expand_builtin_classify_type (tree exp)
1831 {
1832 if (call_expr_nargs (exp))
1833 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1834 return GEN_INT (no_type_class);
1835 }
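
/* As a rough example (assuming the usual type_class numbering from
   typeclass.h), the expansion above turns

     __builtin_classify_type (42)    into integer_type_class,
     __builtin_classify_type (3.14)  into real_type_class,
     __builtin_classify_type (&obj)  into pointer_type_class,

   each emitted as a compile-time integer constant.  */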
1836
1837 /* This helper macro, meant to be used in mathfn_built_in below,
1838 determines which among a set of three builtin math functions is
1839 appropriate for a given type mode. The `F' and `L' cases are
1840 automatically generated from the `double' case. */
1841 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1842 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1843 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1844 fcodel = BUILT_IN_MATHFN##L ; break;
1845 /* Similar to above, but appends _R after any F/L suffix. */
1846 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1847 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1848 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1849 fcodel = BUILT_IN_MATHFN##L_R ; break;
1850
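/* For example, CASE_MATHFN (BUILT_IN_SQRT) expands to roughly:

     case BUILT_IN_SQRT: case BUILT_IN_SQRTF: case BUILT_IN_SQRTL:
       fcode = BUILT_IN_SQRT; fcodef = BUILT_IN_SQRTF;
       fcodel = BUILT_IN_SQRTL; break;
*/
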
1851 /* Return the mathematical function equivalent to FN but operating directly on
1852 TYPE, if available. If IMPLICIT_P is true use the implicit builtin declaration,
1853 otherwise use the explicit declaration. If we can't do the conversion,
1854 return zero. */
1855
1856 static tree
1857 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
1858 {
1859 enum built_in_function fcode, fcodef, fcodel, fcode2;
1860
1861 switch (fn)
1862 {
1863 CASE_MATHFN (BUILT_IN_ACOS)
1864 CASE_MATHFN (BUILT_IN_ACOSH)
1865 CASE_MATHFN (BUILT_IN_ASIN)
1866 CASE_MATHFN (BUILT_IN_ASINH)
1867 CASE_MATHFN (BUILT_IN_ATAN)
1868 CASE_MATHFN (BUILT_IN_ATAN2)
1869 CASE_MATHFN (BUILT_IN_ATANH)
1870 CASE_MATHFN (BUILT_IN_CBRT)
1871 CASE_MATHFN (BUILT_IN_CEIL)
1872 CASE_MATHFN (BUILT_IN_CEXPI)
1873 CASE_MATHFN (BUILT_IN_COPYSIGN)
1874 CASE_MATHFN (BUILT_IN_COS)
1875 CASE_MATHFN (BUILT_IN_COSH)
1876 CASE_MATHFN (BUILT_IN_DREM)
1877 CASE_MATHFN (BUILT_IN_ERF)
1878 CASE_MATHFN (BUILT_IN_ERFC)
1879 CASE_MATHFN (BUILT_IN_EXP)
1880 CASE_MATHFN (BUILT_IN_EXP10)
1881 CASE_MATHFN (BUILT_IN_EXP2)
1882 CASE_MATHFN (BUILT_IN_EXPM1)
1883 CASE_MATHFN (BUILT_IN_FABS)
1884 CASE_MATHFN (BUILT_IN_FDIM)
1885 CASE_MATHFN (BUILT_IN_FLOOR)
1886 CASE_MATHFN (BUILT_IN_FMA)
1887 CASE_MATHFN (BUILT_IN_FMAX)
1888 CASE_MATHFN (BUILT_IN_FMIN)
1889 CASE_MATHFN (BUILT_IN_FMOD)
1890 CASE_MATHFN (BUILT_IN_FREXP)
1891 CASE_MATHFN (BUILT_IN_GAMMA)
1892 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1893 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1894 CASE_MATHFN (BUILT_IN_HYPOT)
1895 CASE_MATHFN (BUILT_IN_ILOGB)
1896 CASE_MATHFN (BUILT_IN_ICEIL)
1897 CASE_MATHFN (BUILT_IN_IFLOOR)
1898 CASE_MATHFN (BUILT_IN_INF)
1899 CASE_MATHFN (BUILT_IN_IRINT)
1900 CASE_MATHFN (BUILT_IN_IROUND)
1901 CASE_MATHFN (BUILT_IN_ISINF)
1902 CASE_MATHFN (BUILT_IN_J0)
1903 CASE_MATHFN (BUILT_IN_J1)
1904 CASE_MATHFN (BUILT_IN_JN)
1905 CASE_MATHFN (BUILT_IN_LCEIL)
1906 CASE_MATHFN (BUILT_IN_LDEXP)
1907 CASE_MATHFN (BUILT_IN_LFLOOR)
1908 CASE_MATHFN (BUILT_IN_LGAMMA)
1909 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1910 CASE_MATHFN (BUILT_IN_LLCEIL)
1911 CASE_MATHFN (BUILT_IN_LLFLOOR)
1912 CASE_MATHFN (BUILT_IN_LLRINT)
1913 CASE_MATHFN (BUILT_IN_LLROUND)
1914 CASE_MATHFN (BUILT_IN_LOG)
1915 CASE_MATHFN (BUILT_IN_LOG10)
1916 CASE_MATHFN (BUILT_IN_LOG1P)
1917 CASE_MATHFN (BUILT_IN_LOG2)
1918 CASE_MATHFN (BUILT_IN_LOGB)
1919 CASE_MATHFN (BUILT_IN_LRINT)
1920 CASE_MATHFN (BUILT_IN_LROUND)
1921 CASE_MATHFN (BUILT_IN_MODF)
1922 CASE_MATHFN (BUILT_IN_NAN)
1923 CASE_MATHFN (BUILT_IN_NANS)
1924 CASE_MATHFN (BUILT_IN_NEARBYINT)
1925 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1926 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1927 CASE_MATHFN (BUILT_IN_POW)
1928 CASE_MATHFN (BUILT_IN_POWI)
1929 CASE_MATHFN (BUILT_IN_POW10)
1930 CASE_MATHFN (BUILT_IN_REMAINDER)
1931 CASE_MATHFN (BUILT_IN_REMQUO)
1932 CASE_MATHFN (BUILT_IN_RINT)
1933 CASE_MATHFN (BUILT_IN_ROUND)
1934 CASE_MATHFN (BUILT_IN_SCALB)
1935 CASE_MATHFN (BUILT_IN_SCALBLN)
1936 CASE_MATHFN (BUILT_IN_SCALBN)
1937 CASE_MATHFN (BUILT_IN_SIGNBIT)
1938 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1939 CASE_MATHFN (BUILT_IN_SIN)
1940 CASE_MATHFN (BUILT_IN_SINCOS)
1941 CASE_MATHFN (BUILT_IN_SINH)
1942 CASE_MATHFN (BUILT_IN_SQRT)
1943 CASE_MATHFN (BUILT_IN_TAN)
1944 CASE_MATHFN (BUILT_IN_TANH)
1945 CASE_MATHFN (BUILT_IN_TGAMMA)
1946 CASE_MATHFN (BUILT_IN_TRUNC)
1947 CASE_MATHFN (BUILT_IN_Y0)
1948 CASE_MATHFN (BUILT_IN_Y1)
1949 CASE_MATHFN (BUILT_IN_YN)
1950
1951 default:
1952 return NULL_TREE;
1953 }
1954
1955 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1956 fcode2 = fcode;
1957 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1958 fcode2 = fcodef;
1959 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1960 fcode2 = fcodel;
1961 else
1962 return NULL_TREE;
1963
1964 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1965 return NULL_TREE;
1966
1967 return builtin_decl_explicit (fcode2);
1968 }
1969
1970 /* Like mathfn_built_in_1(), but always use the implicit array. */
1971
1972 tree
1973 mathfn_built_in (tree type, enum built_in_function fn)
1974 {
1975 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1976 }
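
/* Example: mathfn_built_in (float_type_node, BUILT_IN_SIN) returns the decl
   for sinf when its implicit declaration is available, and NULL_TREE
   otherwise.  */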
1977
1978 /* If errno must be maintained, expand the RTL to check if the result,
1979 TARGET, of a built-in function call, EXP, is NaN, and if so set
1980 errno to EDOM. */
1981
1982 static void
1983 expand_errno_check (tree exp, rtx target)
1984 {
1985 rtx lab = gen_label_rtx ();
1986
1987 /* Test the result; if it is NaN, set errno=EDOM because
1988 the argument was not in the domain. */
1989 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1990 NULL_RTX, NULL_RTX, lab,
1991 /* The jump is very likely. */
1992 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
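
  /* (TARGET == TARGET) compares false only when TARGET is a NaN, so the
     branch to LAB is taken for every ordinary result and the errno
     handling below is skipped.  */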
1993
1994 #ifdef TARGET_EDOM
1995 /* If this built-in doesn't throw an exception, set errno directly. */
1996 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1997 {
1998 #ifdef GEN_ERRNO_RTX
1999 rtx errno_rtx = GEN_ERRNO_RTX;
2000 #else
2001 rtx errno_rtx
2002 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
2003 #endif
2004 emit_move_insn (errno_rtx,
2005 gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
2006 emit_label (lab);
2007 return;
2008 }
2009 #endif
2010
2011 /* Make sure the library call isn't expanded as a tail call. */
2012 CALL_EXPR_TAILCALL (exp) = 0;
2013
2014 /* We can't set errno=EDOM directly; let the library call do it.
2015 Pop the arguments right away in case the call gets deleted. */
2016 NO_DEFER_POP;
2017 expand_call (exp, target, 0);
2018 OK_DEFER_POP;
2019 emit_label (lab);
2020 }
2021
2022 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
2023 Return NULL_RTX if a normal call should be emitted rather than expanding
2024 the function in-line. EXP is the expression that is a call to the builtin
2025 function; if convenient, the result should be placed in TARGET.
2026 SUBTARGET may be used as the target for computing one of EXP's operands. */
2027
2028 static rtx
2029 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
2030 {
2031 optab builtin_optab;
2032 rtx op0, insns;
2033 tree fndecl = get_callee_fndecl (exp);
2034 enum machine_mode mode;
2035 bool errno_set = false;
2036 bool try_widening = false;
2037 tree arg;
2038
2039 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2040 return NULL_RTX;
2041
2042 arg = CALL_EXPR_ARG (exp, 0);
2043
2044 switch (DECL_FUNCTION_CODE (fndecl))
2045 {
2046 CASE_FLT_FN (BUILT_IN_SQRT):
2047 errno_set = ! tree_expr_nonnegative_p (arg);
2048 try_widening = true;
2049 builtin_optab = sqrt_optab;
2050 break;
2051 CASE_FLT_FN (BUILT_IN_EXP):
2052 errno_set = true; builtin_optab = exp_optab; break;
2053 CASE_FLT_FN (BUILT_IN_EXP10):
2054 CASE_FLT_FN (BUILT_IN_POW10):
2055 errno_set = true; builtin_optab = exp10_optab; break;
2056 CASE_FLT_FN (BUILT_IN_EXP2):
2057 errno_set = true; builtin_optab = exp2_optab; break;
2058 CASE_FLT_FN (BUILT_IN_EXPM1):
2059 errno_set = true; builtin_optab = expm1_optab; break;
2060 CASE_FLT_FN (BUILT_IN_LOGB):
2061 errno_set = true; builtin_optab = logb_optab; break;
2062 CASE_FLT_FN (BUILT_IN_LOG):
2063 errno_set = true; builtin_optab = log_optab; break;
2064 CASE_FLT_FN (BUILT_IN_LOG10):
2065 errno_set = true; builtin_optab = log10_optab; break;
2066 CASE_FLT_FN (BUILT_IN_LOG2):
2067 errno_set = true; builtin_optab = log2_optab; break;
2068 CASE_FLT_FN (BUILT_IN_LOG1P):
2069 errno_set = true; builtin_optab = log1p_optab; break;
2070 CASE_FLT_FN (BUILT_IN_ASIN):
2071 builtin_optab = asin_optab; break;
2072 CASE_FLT_FN (BUILT_IN_ACOS):
2073 builtin_optab = acos_optab; break;
2074 CASE_FLT_FN (BUILT_IN_TAN):
2075 builtin_optab = tan_optab; break;
2076 CASE_FLT_FN (BUILT_IN_ATAN):
2077 builtin_optab = atan_optab; break;
2078 CASE_FLT_FN (BUILT_IN_FLOOR):
2079 builtin_optab = floor_optab; break;
2080 CASE_FLT_FN (BUILT_IN_CEIL):
2081 builtin_optab = ceil_optab; break;
2082 CASE_FLT_FN (BUILT_IN_TRUNC):
2083 builtin_optab = btrunc_optab; break;
2084 CASE_FLT_FN (BUILT_IN_ROUND):
2085 builtin_optab = round_optab; break;
2086 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2087 builtin_optab = nearbyint_optab;
2088 if (flag_trapping_math)
2089 break;
2090 /* Else fallthrough and expand as rint. */
2091 CASE_FLT_FN (BUILT_IN_RINT):
2092 builtin_optab = rint_optab; break;
2093 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2094 builtin_optab = significand_optab; break;
2095 default:
2096 gcc_unreachable ();
2097 }
2098
2099 /* Make a suitable register to place result in. */
2100 mode = TYPE_MODE (TREE_TYPE (exp));
2101
2102 if (! flag_errno_math || ! HONOR_NANS (mode))
2103 errno_set = false;
2104
2105 /* Before working hard, check whether the instruction is available, but try
2106 to widen the mode for specific operations. */
2107 if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2108 || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
2109 && (!errno_set || !optimize_insn_for_size_p ()))
2110 {
2111 rtx result = gen_reg_rtx (mode);
2112
2113 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2114 need to expand the argument again. This way, we will not perform
2115 side-effects more than once. */
2116 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2117
2118 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2119
2120 start_sequence ();
2121
2122 /* Compute into RESULT.
2123 Set RESULT to wherever the result comes back. */
2124 result = expand_unop (mode, builtin_optab, op0, result, 0);
2125
2126 if (result != 0)
2127 {
2128 if (errno_set)
2129 expand_errno_check (exp, result);
2130
2131 /* Output the entire sequence. */
2132 insns = get_insns ();
2133 end_sequence ();
2134 emit_insn (insns);
2135 return result;
2136 }
2137
2138 /* If we were unable to expand via the builtin, stop the sequence
2139 (without outputting the insns) and call to the library function
2140 with the stabilized argument list. */
2141 end_sequence ();
2142 }
2143
2144 return expand_call (exp, target, target == const0_rtx);
2145 }
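
/* A sketch of the effect: on a target providing a sqrt<mode>2 insn,

     double r = __builtin_sqrt (x);

   expands to that insn directly; with -fmath-errno and a possibly negative
   X, the expansion above also emits the NaN check that sets errno to EDOM.  */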
2146
2147 /* Expand a call to the builtin binary math functions (pow and atan2).
2148 Return NULL_RTX if a normal call should be emitted rather than expanding the
2149 function in-line. EXP is the expression that is a call to the builtin
2150 function; if convenient, the result should be placed in TARGET.
2151 SUBTARGET may be used as the target for computing one of EXP's
2152 operands. */
2153
2154 static rtx
2155 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2156 {
2157 optab builtin_optab;
2158 rtx op0, op1, insns, result;
2159 int op1_type = REAL_TYPE;
2160 tree fndecl = get_callee_fndecl (exp);
2161 tree arg0, arg1;
2162 enum machine_mode mode;
2163 bool errno_set = true;
2164
2165 switch (DECL_FUNCTION_CODE (fndecl))
2166 {
2167 CASE_FLT_FN (BUILT_IN_SCALBN):
2168 CASE_FLT_FN (BUILT_IN_SCALBLN):
2169 CASE_FLT_FN (BUILT_IN_LDEXP):
2170 op1_type = INTEGER_TYPE;
2171 default:
2172 break;
2173 }
2174
2175 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2176 return NULL_RTX;
2177
2178 arg0 = CALL_EXPR_ARG (exp, 0);
2179 arg1 = CALL_EXPR_ARG (exp, 1);
2180
2181 switch (DECL_FUNCTION_CODE (fndecl))
2182 {
2183 CASE_FLT_FN (BUILT_IN_POW):
2184 builtin_optab = pow_optab; break;
2185 CASE_FLT_FN (BUILT_IN_ATAN2):
2186 builtin_optab = atan2_optab; break;
2187 CASE_FLT_FN (BUILT_IN_SCALB):
2188 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2189 return 0;
2190 builtin_optab = scalb_optab; break;
2191 CASE_FLT_FN (BUILT_IN_SCALBN):
2192 CASE_FLT_FN (BUILT_IN_SCALBLN):
2193 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2194 return 0;
2195 /* Fall through... */
2196 CASE_FLT_FN (BUILT_IN_LDEXP):
2197 builtin_optab = ldexp_optab; break;
2198 CASE_FLT_FN (BUILT_IN_FMOD):
2199 builtin_optab = fmod_optab; break;
2200 CASE_FLT_FN (BUILT_IN_REMAINDER):
2201 CASE_FLT_FN (BUILT_IN_DREM):
2202 builtin_optab = remainder_optab; break;
2203 default:
2204 gcc_unreachable ();
2205 }
2206
2207 /* Make a suitable register to place result in. */
2208 mode = TYPE_MODE (TREE_TYPE (exp));
2209
2210 /* Before working hard, check whether the instruction is available. */
2211 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2212 return NULL_RTX;
2213
2214 result = gen_reg_rtx (mode);
2215
2216 if (! flag_errno_math || ! HONOR_NANS (mode))
2217 errno_set = false;
2218
2219 if (errno_set && optimize_insn_for_size_p ())
2220 return 0;
2221
2222 /* Always stabilize the argument list. */
2223 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2224 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2225
2226 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2227 op1 = expand_normal (arg1);
2228
2229 start_sequence ();
2230
2231 /* Compute into RESULT.
2232 Set RESULT to wherever the result comes back. */
2233 result = expand_binop (mode, builtin_optab, op0, op1,
2234 result, 0, OPTAB_DIRECT);
2235
2236 /* If we were unable to expand via the builtin, stop the sequence
2237 (without outputting the insns) and call to the library function
2238 with the stabilized argument list. */
2239 if (result == 0)
2240 {
2241 end_sequence ();
2242 return expand_call (exp, target, target == const0_rtx);
2243 }
2244
2245 if (errno_set)
2246 expand_errno_check (exp, result);
2247
2248 /* Output the entire sequence. */
2249 insns = get_insns ();
2250 end_sequence ();
2251 emit_insn (insns);
2252
2253 return result;
2254 }
2255
2256 /* Expand a call to the builtin ternary math functions (fma).
2257 Return NULL_RTX if a normal call should be emitted rather than expanding the
2258 function in-line. EXP is the expression that is a call to the builtin
2259 function; if convenient, the result should be placed in TARGET.
2260 SUBTARGET may be used as the target for computing one of EXP's
2261 operands. */
2262
2263 static rtx
2264 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2265 {
2266 optab builtin_optab;
2267 rtx op0, op1, op2, insns, result;
2268 tree fndecl = get_callee_fndecl (exp);
2269 tree arg0, arg1, arg2;
2270 enum machine_mode mode;
2271
2272 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2273 return NULL_RTX;
2274
2275 arg0 = CALL_EXPR_ARG (exp, 0);
2276 arg1 = CALL_EXPR_ARG (exp, 1);
2277 arg2 = CALL_EXPR_ARG (exp, 2);
2278
2279 switch (DECL_FUNCTION_CODE (fndecl))
2280 {
2281 CASE_FLT_FN (BUILT_IN_FMA):
2282 builtin_optab = fma_optab; break;
2283 default:
2284 gcc_unreachable ();
2285 }
2286
2287 /* Make a suitable register to place result in. */
2288 mode = TYPE_MODE (TREE_TYPE (exp));
2289
2290 /* Before working hard, check whether the instruction is available. */
2291 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2292 return NULL_RTX;
2293
2294 result = gen_reg_rtx (mode);
2295
2296 /* Always stabilize the argument list. */
2297 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2298 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2299 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2300
2301 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2302 op1 = expand_normal (arg1);
2303 op2 = expand_normal (arg2);
2304
2305 start_sequence ();
2306
2307 /* Compute into RESULT.
2308 Set RESULT to wherever the result comes back. */
2309 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2310 result, 0);
2311
2312 /* If we were unable to expand via the builtin, stop the sequence
2313 (without outputting the insns) and call to the library function
2314 with the stabilized argument list. */
2315 if (result == 0)
2316 {
2317 end_sequence ();
2318 return expand_call (exp, target, target == const0_rtx);
2319 }
2320
2321 /* Output the entire sequence. */
2322 insns = get_insns ();
2323 end_sequence ();
2324 emit_insn (insns);
2325
2326 return result;
2327 }
2328
2329 /* Expand a call to the builtin sin and cos math functions.
2330 Return NULL_RTX if a normal call should be emitted rather than expanding the
2331 function in-line. EXP is the expression that is a call to the builtin
2332 function; if convenient, the result should be placed in TARGET.
2333 SUBTARGET may be used as the target for computing one of EXP's
2334 operands. */
2335
2336 static rtx
2337 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2338 {
2339 optab builtin_optab;
2340 rtx op0, insns;
2341 tree fndecl = get_callee_fndecl (exp);
2342 enum machine_mode mode;
2343 tree arg;
2344
2345 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2346 return NULL_RTX;
2347
2348 arg = CALL_EXPR_ARG (exp, 0);
2349
2350 switch (DECL_FUNCTION_CODE (fndecl))
2351 {
2352 CASE_FLT_FN (BUILT_IN_SIN):
2353 CASE_FLT_FN (BUILT_IN_COS):
2354 builtin_optab = sincos_optab; break;
2355 default:
2356 gcc_unreachable ();
2357 }
2358
2359 /* Make a suitable register to place result in. */
2360 mode = TYPE_MODE (TREE_TYPE (exp));
2361
2362 /* Check if the sincos insn is available; otherwise fall back
2363 to the sin or cos insn. */
2364 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2365 switch (DECL_FUNCTION_CODE (fndecl))
2366 {
2367 CASE_FLT_FN (BUILT_IN_SIN):
2368 builtin_optab = sin_optab; break;
2369 CASE_FLT_FN (BUILT_IN_COS):
2370 builtin_optab = cos_optab; break;
2371 default:
2372 gcc_unreachable ();
2373 }
2374
2375 /* Before working hard, check whether the instruction is available. */
2376 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2377 {
2378 rtx result = gen_reg_rtx (mode);
2379
2380 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2381 need to expand the argument again. This way, we will not perform
2382 side-effects more than once. */
2383 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2384
2385 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2386
2387 start_sequence ();
2388
2389 /* Compute into RESULT.
2390 Set RESULT to wherever the result comes back. */
2391 if (builtin_optab == sincos_optab)
2392 {
2393 int ok;
2394
2395 switch (DECL_FUNCTION_CODE (fndecl))
2396 {
2397 CASE_FLT_FN (BUILT_IN_SIN):
2398 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2399 break;
2400 CASE_FLT_FN (BUILT_IN_COS):
2401 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2402 break;
2403 default:
2404 gcc_unreachable ();
2405 }
2406 gcc_assert (ok);
2407 }
2408 else
2409 result = expand_unop (mode, builtin_optab, op0, result, 0);
2410
2411 if (result != 0)
2412 {
2413 /* Output the entire sequence. */
2414 insns = get_insns ();
2415 end_sequence ();
2416 emit_insn (insns);
2417 return result;
2418 }
2419
2420 /* If we were unable to expand via the builtin, stop the sequence
2421 (without outputting the insns) and call to the library function
2422 with the stabilized argument list. */
2423 end_sequence ();
2424 }
2425
2426 return expand_call (exp, target, target == const0_rtx);
2427 }
2428
2429 /* Given an interclass math builtin decl FNDECL and its argument ARG,
2430 return an RTL instruction code that implements the functionality.
2431 If that isn't possible or available, return CODE_FOR_nothing. */
2432
2433 static enum insn_code
2434 interclass_mathfn_icode (tree arg, tree fndecl)
2435 {
2436 bool errno_set = false;
2437 optab builtin_optab = unknown_optab;
2438 enum machine_mode mode;
2439
2440 switch (DECL_FUNCTION_CODE (fndecl))
2441 {
2442 CASE_FLT_FN (BUILT_IN_ILOGB):
2443 errno_set = true; builtin_optab = ilogb_optab; break;
2444 CASE_FLT_FN (BUILT_IN_ISINF):
2445 builtin_optab = isinf_optab; break;
2446 case BUILT_IN_ISNORMAL:
2447 case BUILT_IN_ISFINITE:
2448 CASE_FLT_FN (BUILT_IN_FINITE):
2449 case BUILT_IN_FINITED32:
2450 case BUILT_IN_FINITED64:
2451 case BUILT_IN_FINITED128:
2452 case BUILT_IN_ISINFD32:
2453 case BUILT_IN_ISINFD64:
2454 case BUILT_IN_ISINFD128:
2455 /* These builtins have no optabs (yet). */
2456 break;
2457 default:
2458 gcc_unreachable ();
2459 }
2460
2461 /* There's no easy way to detect the case we need to set EDOM. */
2462 if (flag_errno_math && errno_set)
2463 return CODE_FOR_nothing;
2464
2465 /* Optab mode depends on the mode of the input argument. */
2466 mode = TYPE_MODE (TREE_TYPE (arg));
2467
2468 if (builtin_optab)
2469 return optab_handler (builtin_optab, mode);
2470 return CODE_FOR_nothing;
2471 }
2472
2473 /* Expand a call to one of the builtin math functions that operate on
2474 a floating point argument and output an integer result (ilogb, isinf,
2475 isnan, etc.).
2476 Return 0 if a normal call should be emitted rather than expanding the
2477 function in-line. EXP is the expression that is a call to the builtin
2478 function; if convenient, the result should be placed in TARGET. */
2479
2480 static rtx
2481 expand_builtin_interclass_mathfn (tree exp, rtx target)
2482 {
2483 enum insn_code icode = CODE_FOR_nothing;
2484 rtx op0;
2485 tree fndecl = get_callee_fndecl (exp);
2486 enum machine_mode mode;
2487 tree arg;
2488
2489 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2490 return NULL_RTX;
2491
2492 arg = CALL_EXPR_ARG (exp, 0);
2493 icode = interclass_mathfn_icode (arg, fndecl);
2494 mode = TYPE_MODE (TREE_TYPE (arg));
2495
2496 if (icode != CODE_FOR_nothing)
2497 {
2498 struct expand_operand ops[1];
2499 rtx last = get_last_insn ();
2500 tree orig_arg = arg;
2501
2502 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2503 need to expand the argument again. This way, we will not perform
2504 side-effects more than once. */
2505 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2506
2507 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2508
2509 if (mode != GET_MODE (op0))
2510 op0 = convert_to_mode (mode, op0, 0);
2511
2512 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2513 if (maybe_legitimize_operands (icode, 0, 1, ops)
2514 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2515 return ops[0].value;
2516
2517 delete_insns_since (last);
2518 CALL_EXPR_ARG (exp, 0) = orig_arg;
2519 }
2520
2521 return NULL_RTX;
2522 }
2523
2524 /* Expand a call to the builtin sincos math function.
2525 Return NULL_RTX if a normal call should be emitted rather than expanding the
2526 function in-line. EXP is the expression that is a call to the builtin
2527 function. */
2528
2529 static rtx
2530 expand_builtin_sincos (tree exp)
2531 {
2532 rtx op0, op1, op2, target1, target2;
2533 enum machine_mode mode;
2534 tree arg, sinp, cosp;
2535 int result;
2536 location_t loc = EXPR_LOCATION (exp);
2537 tree alias_type, alias_off;
2538
2539 if (!validate_arglist (exp, REAL_TYPE,
2540 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2541 return NULL_RTX;
2542
2543 arg = CALL_EXPR_ARG (exp, 0);
2544 sinp = CALL_EXPR_ARG (exp, 1);
2545 cosp = CALL_EXPR_ARG (exp, 2);
2546
2547 /* Make a suitable register to place result in. */
2548 mode = TYPE_MODE (TREE_TYPE (arg));
2549
2550 /* Check if sincos insn is available, otherwise emit the call. */
2551 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2552 return NULL_RTX;
2553
2554 target1 = gen_reg_rtx (mode);
2555 target2 = gen_reg_rtx (mode);
2556
2557 op0 = expand_normal (arg);
2558 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2559 alias_off = build_int_cst (alias_type, 0);
2560 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2561 sinp, alias_off));
2562 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2563 cosp, alias_off));
2564
2565 /* Compute into target1 and target2.
2566 Set TARGET to wherever the result comes back. */
2567 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2568 gcc_assert (result);
2569
2570 /* Move target1 and target2 to the memory locations indicated
2571 by op1 and op2. */
2572 emit_move_insn (op1, target1);
2573 emit_move_insn (op2, target2);
2574
2575 return const0_rtx;
2576 }
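
/* Thus a call such as

     sincos (x, &s, &c);

   becomes a single sincos<mode>3 insn computing both values at once, whose
   results are then stored through the SINP and COSP pointers.  */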
2577
2578 /* Expand a call to the internal cexpi builtin to the sincos math function.
2579 EXP is the expression that is a call to the builtin function; if convenient,
2580 the result should be placed in TARGET. */
2581
2582 static rtx
2583 expand_builtin_cexpi (tree exp, rtx target)
2584 {
2585 tree fndecl = get_callee_fndecl (exp);
2586 tree arg, type;
2587 enum machine_mode mode;
2588 rtx op0, op1, op2;
2589 location_t loc = EXPR_LOCATION (exp);
2590
2591 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2592 return NULL_RTX;
2593
2594 arg = CALL_EXPR_ARG (exp, 0);
2595 type = TREE_TYPE (arg);
2596 mode = TYPE_MODE (TREE_TYPE (arg));
2597
2598 /* Try expanding via a sincos optab; fall back to emitting a libcall
2599 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2600 is only generated when folding sincos or cexp, or when either is available. */
2601 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2602 {
2603 op1 = gen_reg_rtx (mode);
2604 op2 = gen_reg_rtx (mode);
2605
2606 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2607
2608 /* Compute into op1 and op2. */
2609 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2610 }
2611 else if (targetm.libc_has_function (function_sincos))
2612 {
2613 tree call, fn = NULL_TREE;
2614 tree top1, top2;
2615 rtx op1a, op2a;
2616
2617 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2618 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2619 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2620 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2621 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2622 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2623 else
2624 gcc_unreachable ();
2625
2626 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2627 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2628 op1a = copy_addr_to_reg (XEXP (op1, 0));
2629 op2a = copy_addr_to_reg (XEXP (op2, 0));
2630 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2631 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2632
2633 /* Make sure not to fold the sincos call again. */
2634 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2635 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2636 call, 3, arg, top1, top2));
2637 }
2638 else
2639 {
2640 tree call, fn = NULL_TREE, narg;
2641 tree ctype = build_complex_type (type);
2642
2643 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2644 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2645 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2646 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2647 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2648 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2649 else
2650 gcc_unreachable ();
2651
2652 /* If we don't have a decl for cexp create one. This is the
2653 friendliest fallback if the user calls __builtin_cexpi
2654 on a target without full C99 function support. */
2655 if (fn == NULL_TREE)
2656 {
2657 tree fntype;
2658 const char *name = NULL;
2659
2660 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2661 name = "cexpf";
2662 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2663 name = "cexp";
2664 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2665 name = "cexpl";
2666
2667 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2668 fn = build_fn_decl (name, fntype);
2669 }
2670
2671 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2672 build_real (type, dconst0), arg);
2673
2674 /* Make sure not to fold the cexp call again. */
2675 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2676 return expand_expr (build_call_nary (ctype, call, 1, narg),
2677 target, VOIDmode, EXPAND_NORMAL);
2678 }
2679
2680 /* Now build the proper return type. */
2681 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2682 make_tree (TREE_TYPE (arg), op2),
2683 make_tree (TREE_TYPE (arg), op1)),
2684 target, VOIDmode, EXPAND_NORMAL);
2685 }
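
/* In summary, __builtin_cexpi (x) is lowered, in order of preference, to a
   sincos<mode>3 insn, a libcall to sincos (x, &s, &c), or a call to
   cexp (0 + x*i), with the complex return value assembled from the parts.  */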
2686
2687 /* Conveniently construct a function call expression. FNDECL names the
2688 function to be called, N is the number of arguments, and the "..."
2689 parameters are the argument expressions. Unlike build_call_expr,
2690 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2691
2692 static tree
2693 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2694 {
2695 va_list ap;
2696 tree fntype = TREE_TYPE (fndecl);
2697 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2698
2699 va_start (ap, n);
2700 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2701 va_end (ap);
2702 SET_EXPR_LOCATION (fn, loc);
2703 return fn;
2704 }
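
/* E.g. build_call_nofold_loc (loc, fndecl, 2, a, b) yields an unfolded
   two-argument CALL_EXPR to FNDECL; the fallback paths below rely on this
   to avoid re-folding the calls they construct.  */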
2705
2706 /* Expand a call to one of the builtin rounding functions gcc defines
2707 as an extension (lfloor and lceil). As these are gcc extensions we
2708 do not need to worry about setting errno to EDOM.
2709 If expanding via optab fails, lower expression to (int)(floor(x)).
2710 EXP is the expression that is a call to the builtin function;
2711 if convenient, the result should be placed in TARGET. */
2712
2713 static rtx
2714 expand_builtin_int_roundingfn (tree exp, rtx target)
2715 {
2716 convert_optab builtin_optab;
2717 rtx op0, insns, tmp;
2718 tree fndecl = get_callee_fndecl (exp);
2719 enum built_in_function fallback_fn;
2720 tree fallback_fndecl;
2721 enum machine_mode mode;
2722 tree arg;
2723
2724 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2725 gcc_unreachable ();
2726
2727 arg = CALL_EXPR_ARG (exp, 0);
2728
2729 switch (DECL_FUNCTION_CODE (fndecl))
2730 {
2731 CASE_FLT_FN (BUILT_IN_ICEIL):
2732 CASE_FLT_FN (BUILT_IN_LCEIL):
2733 CASE_FLT_FN (BUILT_IN_LLCEIL):
2734 builtin_optab = lceil_optab;
2735 fallback_fn = BUILT_IN_CEIL;
2736 break;
2737
2738 CASE_FLT_FN (BUILT_IN_IFLOOR):
2739 CASE_FLT_FN (BUILT_IN_LFLOOR):
2740 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2741 builtin_optab = lfloor_optab;
2742 fallback_fn = BUILT_IN_FLOOR;
2743 break;
2744
2745 default:
2746 gcc_unreachable ();
2747 }
2748
2749 /* Make a suitable register to place result in. */
2750 mode = TYPE_MODE (TREE_TYPE (exp));
2751
2752 target = gen_reg_rtx (mode);
2753
2754 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2755 need to expand the argument again. This way, we will not perform
2756 side-effects more than once. */
2757 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2758
2759 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2760
2761 start_sequence ();
2762
2763 /* Compute into TARGET. */
2764 if (expand_sfix_optab (target, op0, builtin_optab))
2765 {
2766 /* Output the entire sequence. */
2767 insns = get_insns ();
2768 end_sequence ();
2769 emit_insn (insns);
2770 return target;
2771 }
2772
2773 /* If we were unable to expand via the builtin, stop the sequence
2774 (without outputting the insns). */
2775 end_sequence ();
2776
2777 /* Fall back to floating point rounding optab. */
2778 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2779
2780 /* For non-C99 targets we may end up without a fallback fndecl here
2781 if the user called __builtin_lfloor directly. In this case emit
2782 a call to the floor/ceil variants nevertheless. This should result
2783 in the best user experience for targets lacking full C99 support. */
2784 if (fallback_fndecl == NULL_TREE)
2785 {
2786 tree fntype;
2787 const char *name = NULL;
2788
2789 switch (DECL_FUNCTION_CODE (fndecl))
2790 {
2791 case BUILT_IN_ICEIL:
2792 case BUILT_IN_LCEIL:
2793 case BUILT_IN_LLCEIL:
2794 name = "ceil";
2795 break;
2796 case BUILT_IN_ICEILF:
2797 case BUILT_IN_LCEILF:
2798 case BUILT_IN_LLCEILF:
2799 name = "ceilf";
2800 break;
2801 case BUILT_IN_ICEILL:
2802 case BUILT_IN_LCEILL:
2803 case BUILT_IN_LLCEILL:
2804 name = "ceill";
2805 break;
2806 case BUILT_IN_IFLOOR:
2807 case BUILT_IN_LFLOOR:
2808 case BUILT_IN_LLFLOOR:
2809 name = "floor";
2810 break;
2811 case BUILT_IN_IFLOORF:
2812 case BUILT_IN_LFLOORF:
2813 case BUILT_IN_LLFLOORF:
2814 name = "floorf";
2815 break;
2816 case BUILT_IN_IFLOORL:
2817 case BUILT_IN_LFLOORL:
2818 case BUILT_IN_LLFLOORL:
2819 name = "floorl";
2820 break;
2821 default:
2822 gcc_unreachable ();
2823 }
2824
2825 fntype = build_function_type_list (TREE_TYPE (arg),
2826 TREE_TYPE (arg), NULL_TREE);
2827 fallback_fndecl = build_fn_decl (name, fntype);
2828 }
2829
2830 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2831
2832 tmp = expand_normal (exp);
2833 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2834
2835 /* Truncate the result of the floating point fallback call to integer
2836 via expand_fix (). */
2837 target = gen_reg_rtx (mode);
2838 expand_fix (target, tmp, 0);
2839
2840 return target;
2841 }
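
/* For example, __builtin_lceil (x) becomes a single lceil optab insn when
   the target provides one, and otherwise is lowered to (long) ceil (x)
   via the expand_fix call above.  */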
2842
2843 /* Expand a call to one of the builtin math functions doing integer
2844 conversion (lrint).
2845 Return 0 if a normal call should be emitted rather than expanding the
2846 function in-line. EXP is the expression that is a call to the builtin
2847 function; if convenient, the result should be placed in TARGET. */
2848
2849 static rtx
2850 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2851 {
2852 convert_optab builtin_optab;
2853 rtx op0, insns;
2854 tree fndecl = get_callee_fndecl (exp);
2855 tree arg;
2856 enum machine_mode mode;
2857 enum built_in_function fallback_fn = BUILT_IN_NONE;
2858
2859 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2860 gcc_unreachable ();
2861
2862 arg = CALL_EXPR_ARG (exp, 0);
2863
2864 switch (DECL_FUNCTION_CODE (fndecl))
2865 {
2866 CASE_FLT_FN (BUILT_IN_IRINT):
2867 fallback_fn = BUILT_IN_LRINT;
2868 /* FALLTHRU */
2869 CASE_FLT_FN (BUILT_IN_LRINT):
2870 CASE_FLT_FN (BUILT_IN_LLRINT):
2871 builtin_optab = lrint_optab;
2872 break;
2873
2874 CASE_FLT_FN (BUILT_IN_IROUND):
2875 fallback_fn = BUILT_IN_LROUND;
2876 /* FALLTHRU */
2877 CASE_FLT_FN (BUILT_IN_LROUND):
2878 CASE_FLT_FN (BUILT_IN_LLROUND):
2879 builtin_optab = lround_optab;
2880 break;
2881
2882 default:
2883 gcc_unreachable ();
2884 }
2885
2886 /* There's no easy way to detect the case we need to set EDOM. */
2887 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2888 return NULL_RTX;
2889
2890 /* Make a suitable register to place result in. */
2891 mode = TYPE_MODE (TREE_TYPE (exp));
2892
2893 /* If errno doesn't need to be maintained, try expanding directly via the optab. */
2894 if (!flag_errno_math)
2895 {
2896 rtx result = gen_reg_rtx (mode);
2897
2898 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2899 need to expand the argument again. This way, we will not perform
2900 side-effects more than once. */
2901 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2902
2903 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2904
2905 start_sequence ();
2906
2907 if (expand_sfix_optab (result, op0, builtin_optab))
2908 {
2909 /* Output the entire sequence. */
2910 insns = get_insns ();
2911 end_sequence ();
2912 emit_insn (insns);
2913 return result;
2914 }
2915
2916 /* If we were unable to expand via the builtin, stop the sequence
2917 (without outputting the insns) and call to the library function
2918 with the stabilized argument list. */
2919 end_sequence ();
2920 }
2921
2922 if (fallback_fn != BUILT_IN_NONE)
2923 {
2924 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2925 targets, (int) round (x) should never be transformed into
2926 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2927 a call to lround in the hope that the target provides at least some
2928 C99 functions. This should result in the best user experience for
2929 targets lacking full C99 support. */
2930 tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
2931 fallback_fn, 0);
2932
2933 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2934 fallback_fndecl, 1, arg);
2935
2936 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2937 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2938 return convert_to_mode (mode, target, 0);
2939 }
2940
2941 return expand_call (exp, target, target == const0_rtx);
2942 }
2943
2944 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2945 a normal call should be emitted rather than expanding the function
2946 in-line. EXP is the expression that is a call to the builtin
2947 function; if convenient, the result should be placed in TARGET. */
2948
2949 static rtx
2950 expand_builtin_powi (tree exp, rtx target)
2951 {
2952 tree arg0, arg1;
2953 rtx op0, op1;
2954 enum machine_mode mode;
2955 enum machine_mode mode2;
2956
2957 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2958 return NULL_RTX;
2959
2960 arg0 = CALL_EXPR_ARG (exp, 0);
2961 arg1 = CALL_EXPR_ARG (exp, 1);
2962 mode = TYPE_MODE (TREE_TYPE (exp));
2963
2964 /* Emit a libcall to libgcc. */
2965
2966 /* Mode of the 2nd argument must match that of an int. */
2967 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2968
2969 if (target == NULL_RTX)
2970 target = gen_reg_rtx (mode);
2971
2972 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2973 if (GET_MODE (op0) != mode)
2974 op0 = convert_to_mode (mode, op0, 0);
2975 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2976 if (GET_MODE (op1) != mode2)
2977 op1 = convert_to_mode (mode2, op1, 0);
2978
2979 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2980 target, LCT_CONST, mode, 2,
2981 op0, mode, op1, mode2);
2982
2983 return target;
2984 }
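
/* No inline expansion happens here: __builtin_powi (x, n) with double X
   simply becomes a call to the corresponding libgcc routine (__powidf2
   on typical targets).  */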
2985
2986 /* Expand expression EXP which is a call to the strlen builtin. Return
2987 NULL_RTX if we failed; the caller should emit a normal call, otherwise
2988 try to get the result in TARGET, if convenient. */
2989
2990 static rtx
2991 expand_builtin_strlen (tree exp, rtx target,
2992 enum machine_mode target_mode)
2993 {
2994 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2995 return NULL_RTX;
2996 else
2997 {
2998 struct expand_operand ops[4];
2999 rtx pat;
3000 tree len;
3001 tree src = CALL_EXPR_ARG (exp, 0);
3002 rtx src_reg, before_strlen;
3003 enum machine_mode insn_mode = target_mode;
3004 enum insn_code icode = CODE_FOR_nothing;
3005 unsigned int align;
3006
3007 /* If the length can be computed at compile-time, return it. */
3008 len = c_strlen (src, 0);
3009 if (len)
3010 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3011
3012 /* If the length can be computed at compile-time and is a constant
3013 integer, but there are side-effects in src, evaluate
3014 src for side-effects, then return len.
3015 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3016 can be optimized into: i++; x = 3; */
3017 len = c_strlen (src, 1);
3018 if (len && TREE_CODE (len) == INTEGER_CST)
3019 {
3020 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3021 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3022 }
3023
3024 align = get_pointer_alignment (src) / BITS_PER_UNIT;
3025
3026 /* If SRC is not a pointer type, don't do this operation inline. */
3027 if (align == 0)
3028 return NULL_RTX;
3029
3030 /* Bail out if we can't compute strlen in the right mode. */
3031 while (insn_mode != VOIDmode)
3032 {
3033 icode = optab_handler (strlen_optab, insn_mode);
3034 if (icode != CODE_FOR_nothing)
3035 break;
3036
3037 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3038 }
3039 if (insn_mode == VOIDmode)
3040 return NULL_RTX;
3041
3042 /* Make a place to hold the source address. We will not expand
3043 the actual source until we are sure that the expansion will
3044 not fail -- there are trees that cannot be expanded twice. */
3045 src_reg = gen_reg_rtx (Pmode);
3046
3047 /* Mark the beginning of the strlen sequence so we can emit the
3048 source operand later. */
3049 before_strlen = get_last_insn ();
3050
3051 create_output_operand (&ops[0], target, insn_mode);
3052 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3053 create_integer_operand (&ops[2], 0);
3054 create_integer_operand (&ops[3], align);
3055 if (!maybe_expand_insn (icode, 4, ops))
3056 return NULL_RTX;
3057
3058 /* Now that we are assured of success, expand the source. */
3059 start_sequence ();
3060 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3061 if (pat != src_reg)
3062 {
3063 #ifdef POINTERS_EXTEND_UNSIGNED
3064 if (GET_MODE (pat) != Pmode)
3065 pat = convert_to_mode (Pmode, pat,
3066 POINTERS_EXTEND_UNSIGNED);
3067 #endif
3068 emit_move_insn (src_reg, pat);
3069 }
3070 pat = get_insns ();
3071 end_sequence ();
3072
3073 if (before_strlen)
3074 emit_insn_after (pat, before_strlen);
3075 else
3076 emit_insn_before (pat, get_insns ());
3077
3078 /* Return the value in the proper mode for this function. */
3079 if (GET_MODE (ops[0].value) == target_mode)
3080 target = ops[0].value;
3081 else if (target != 0)
3082 convert_move (target, ops[0].value, 0);
3083 else
3084 target = convert_to_mode (target_mode, ops[0].value, 0);
3085
3086 return target;
3087 }
3088 }
3089
3090 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3091 bytes from the constant string DATA + OFFSET and return them as a
3092 target constant. */
3093
3094 static rtx
3095 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3096 enum machine_mode mode)
3097 {
3098 const char *str = (const char *) data;
3099
3100 gcc_assert (offset >= 0
3101 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3102 <= strlen (str) + 1));
3103
3104 return c_readstr (str + offset, mode);
3105 }
3106
3107 /* LEN specifies the length of the block for a memcpy/memset operation.
3108 Figure out its range and store it in MIN_SIZE/MAX_SIZE.
3109 In some cases we can make a very likely guess about the maximum size,
3110 which we store in PROBABLE_MAX_SIZE. */
3111
3112 static void
3113 determine_block_size (tree len, rtx len_rtx,
3114 unsigned HOST_WIDE_INT *min_size,
3115 unsigned HOST_WIDE_INT *max_size,
3116 unsigned HOST_WIDE_INT *probable_max_size)
3117 {
3118 if (CONST_INT_P (len_rtx))
3119 {
3120 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3121 return;
3122 }
3123 else
3124 {
3125 double_int min, max;
3126 enum value_range_type range_type = VR_UNDEFINED;
3127
3128 /* Determine bounds from the type. */
3129 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3130 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3131 else
3132 *min_size = 0;
3133 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3134 *probable_max_size = *max_size
3135 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3136 else
3137 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3138
3139 if (TREE_CODE (len) == SSA_NAME)
3140 range_type = get_range_info (len, &min, &max);
3141 if (range_type == VR_RANGE)
3142 {
3143 if (min.fits_uhwi () && *min_size < min.to_uhwi ())
3144 *min_size = min.to_uhwi ();
3145 if (max.fits_uhwi () && *max_size > max.to_uhwi ())
3146 *probable_max_size = *max_size = max.to_uhwi ();
3147 }
3148 else if (range_type == VR_ANTI_RANGE)
3149 {
3150 /* An anti-range 0...N lets us determine that the minimal size is N+1. */
3151 if (min.is_zero ())
3152 {
3153 if ((max + double_int_one).fits_uhwi ())
3154 *min_size = (max + double_int_one).to_uhwi ();
3155 }
3156 /* Code like
3157
3158 int n;
3159 if (n < 100)
3160 memcpy (a, b, n)
3161
3162 produces an anti-range allowing negative values of N. We can
3163 still use that information to guess that N is not negative.
3164 */
3165 else if (!max.ule (double_int_one.lshift (30))
3166 && min.fits_uhwi ())
3167 *probable_max_size = min.to_uhwi () - 1;
3168 }
3169 }
3170 gcc_checking_assert (*max_size <=
3171 (unsigned HOST_WIDE_INT)
3172 GET_MODE_MASK (GET_MODE (len_rtx)));
3173 }
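
/* Example: if range information proved 4 <= n <= 32 for the N in
   memcpy (a, b, n), MIN_SIZE becomes 4 and both MAX_SIZE and
   PROBABLE_MAX_SIZE become 32.  */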
3174
3175 /* Expand a call EXP to the memcpy builtin.
3176 Return NULL_RTX if we failed; the caller should emit a normal call,
3177 otherwise try to get the result in TARGET, if convenient (and in
3178 mode MODE if that's convenient). */
3179
3180 static rtx
3181 expand_builtin_memcpy (tree exp, rtx target)
3182 {
3183 if (!validate_arglist (exp,
3184 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3185 return NULL_RTX;
3186 else
3187 {
3188 tree dest = CALL_EXPR_ARG (exp, 0);
3189 tree src = CALL_EXPR_ARG (exp, 1);
3190 tree len = CALL_EXPR_ARG (exp, 2);
3191 const char *src_str;
3192 unsigned int src_align = get_pointer_alignment (src);
3193 unsigned int dest_align = get_pointer_alignment (dest);
3194 rtx dest_mem, src_mem, dest_addr, len_rtx;
3195 HOST_WIDE_INT expected_size = -1;
3196 unsigned int expected_align = 0;
3197 unsigned HOST_WIDE_INT min_size;
3198 unsigned HOST_WIDE_INT max_size;
3199 unsigned HOST_WIDE_INT probable_max_size;
3200
3201 /* If DEST is not a pointer type, call the normal function. */
3202 if (dest_align == 0)
3203 return NULL_RTX;
3204
3205 /* If SRC is not a pointer type, don't do this
3206 operation in-line. */
3207 if (src_align == 0)
3208 return NULL_RTX;
3209
3210 if (currently_expanding_gimple_stmt)
3211 stringop_block_profile (currently_expanding_gimple_stmt,
3212 &expected_align, &expected_size);
3213
3214 if (expected_align < dest_align)
3215 expected_align = dest_align;
3216 dest_mem = get_memory_rtx (dest, len);
3217 set_mem_align (dest_mem, dest_align);
3218 len_rtx = expand_normal (len);
3219 determine_block_size (len, len_rtx, &min_size, &max_size,
3220 &probable_max_size);
3221 src_str = c_getstr (src);
3222
3223 /* If SRC is a string constant and block move would be done
3224 by pieces, we can avoid loading the string from memory
3225 and store only the computed constants. */
3226 if (src_str
3227 && CONST_INT_P (len_rtx)
3228 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3229 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3230 CONST_CAST (char *, src_str),
3231 dest_align, false))
3232 {
3233 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3234 builtin_memcpy_read_str,
3235 CONST_CAST (char *, src_str),
3236 dest_align, false, 0);
3237 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3238 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3239 return dest_mem;
3240 }
3241
3242 src_mem = get_memory_rtx (src, len);
3243 set_mem_align (src_mem, src_align);
3244
3245 /* Copy word part most expediently. */
3246 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3247 CALL_EXPR_TAILCALL (exp)
3248 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3249 expected_align, expected_size,
3250 min_size, max_size, probable_max_size);
3251
3252 if (dest_addr == 0)
3253 {
3254 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3255 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3256 }
3257 return dest_addr;
3258 }
3259 }
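
/* For instance, memcpy (buf, "hi", 3) with sufficiently aligned BUF is
   expanded by store_by_pieces as immediate stores of the string bytes,
   never loading from the source string.  */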
3260
3261 /* Expand a call EXP to the mempcpy builtin.
3262 Return NULL_RTX if we failed; the caller should emit a normal call,
3263 otherwise try to get the result in TARGET, if convenient (and in
3264 mode MODE if that's convenient). If ENDP is 0 return the
3265 destination pointer, if ENDP is 1 return the end pointer ala
3266 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3267 stpcpy. */
3268
3269 static rtx
3270 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3271 {
3272 if (!validate_arglist (exp,
3273 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3274 return NULL_RTX;
3275 else
3276 {
3277 tree dest = CALL_EXPR_ARG (exp, 0);
3278 tree src = CALL_EXPR_ARG (exp, 1);
3279 tree len = CALL_EXPR_ARG (exp, 2);
3280 return expand_builtin_mempcpy_args (dest, src, len,
3281 target, mode, /*endp=*/ 1);
3282 }
3283 }
3284
3285 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3286 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3287 so that this can also be called without constructing an actual CALL_EXPR.
3288 The other arguments and return value are the same as for
3289 expand_builtin_mempcpy. */
3290
3291 static rtx
3292 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3293 rtx target, enum machine_mode mode, int endp)
3294 {
3295 /* If return value is ignored, transform mempcpy into memcpy. */
3296 if (target == const0_rtx && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3297 {
3298 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3299 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3300 dest, src, len);
3301 return expand_expr (result, target, mode, EXPAND_NORMAL);
3302 }
3303 else
3304 {
3305 const char *src_str;
3306 unsigned int src_align = get_pointer_alignment (src);
3307 unsigned int dest_align = get_pointer_alignment (dest);
3308 rtx dest_mem, src_mem, len_rtx;
3309
3310 /* If either SRC or DEST is not a pointer type, don't do this
3311 operation in-line. */
3312 if (dest_align == 0 || src_align == 0)
3313 return NULL_RTX;
3314
3315 /* If LEN is not constant, call the normal function. */
3316 if (! tree_fits_uhwi_p (len))
3317 return NULL_RTX;
3318
3319 len_rtx = expand_normal (len);
3320 src_str = c_getstr (src);
3321
3322 /* If SRC is a string constant and block move would be done
3323 by pieces, we can avoid loading the string from memory
3324 and only store the computed constants. */
3325 if (src_str
3326 && CONST_INT_P (len_rtx)
3327 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3328 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3329 CONST_CAST (char *, src_str),
3330 dest_align, false))
3331 {
3332 dest_mem = get_memory_rtx (dest, len);
3333 set_mem_align (dest_mem, dest_align);
3334 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3335 builtin_memcpy_read_str,
3336 CONST_CAST (char *, src_str),
3337 dest_align, false, endp);
3338 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3339 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3340 return dest_mem;
3341 }
3342
3343 if (CONST_INT_P (len_rtx)
3344 && can_move_by_pieces (INTVAL (len_rtx),
3345 MIN (dest_align, src_align)))
3346 {
3347 dest_mem = get_memory_rtx (dest, len);
3348 set_mem_align (dest_mem, dest_align);
3349 src_mem = get_memory_rtx (src, len);
3350 set_mem_align (src_mem, src_align);
3351 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3352 MIN (dest_align, src_align), endp);
3353 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3354 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3355 return dest_mem;
3356 }
3357
3358 return NULL_RTX;
3359 }
3360 }
3361
3362 #ifndef HAVE_movstr
3363 # define HAVE_movstr 0
3364 # define CODE_FOR_movstr CODE_FOR_nothing
3365 #endif
3366
3367 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3368 we failed; the caller should emit a normal call, otherwise try to
3369 get the result in TARGET, if convenient. If ENDP is 0 return the
3370 destination pointer, if ENDP is 1 return the end pointer ala
3371 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3372 stpcpy. */
3373
3374 static rtx
3375 expand_movstr (tree dest, tree src, rtx target, int endp)
3376 {
3377 struct expand_operand ops[3];
3378 rtx dest_mem;
3379 rtx src_mem;
3380
3381 if (!HAVE_movstr)
3382 return NULL_RTX;
3383
3384 dest_mem = get_memory_rtx (dest, NULL);
3385 src_mem = get_memory_rtx (src, NULL);
3386 if (!endp)
3387 {
3388 target = force_reg (Pmode, XEXP (dest_mem, 0));
3389 dest_mem = replace_equiv_address (dest_mem, target);
3390 }
3391
3392 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3393 create_fixed_operand (&ops[1], dest_mem);
3394 create_fixed_operand (&ops[2], src_mem);
3395 expand_insn (CODE_FOR_movstr, 3, ops);
3396
3397 if (endp && target != const0_rtx)
3398 {
3399 target = ops[0].value;
3400 /* movstr is supposed to set end to the address of the NUL
3401 terminator. If the caller requested a mempcpy-like return value,
3402 adjust it. */
3403 if (endp == 1)
3404 {
3405 rtx tem = plus_constant (GET_MODE (target),
3406 gen_lowpart (GET_MODE (target), target), 1);
3407 emit_move_insn (target, force_operand (tem, NULL_RTX));
3408 }
3409 }
3410 return target;
3411 }
3412
3413 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3414 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3415 try to get the result in TARGET, if convenient (and in mode MODE if that's
3416 convenient). */
3417
3418 static rtx
3419 expand_builtin_strcpy (tree exp, rtx target)
3420 {
3421 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3422 {
3423 tree dest = CALL_EXPR_ARG (exp, 0);
3424 tree src = CALL_EXPR_ARG (exp, 1);
3425 return expand_builtin_strcpy_args (dest, src, target);
3426 }
3427 return NULL_RTX;
3428 }
3429
3430 /* Helper function to do the actual work for expand_builtin_strcpy. The
3431 arguments to the builtin_strcpy call DEST and SRC are broken out
3432 so that this can also be called without constructing an actual CALL_EXPR.
3433 The other arguments and return value are the same as for
3434 expand_builtin_strcpy. */
3435
3436 static rtx
3437 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3438 {
3439 return expand_movstr (dest, src, target, /*endp=*/0);
3440 }
3441
3442 /* Expand a call EXP to the stpcpy builtin.
3443 Return NULL_RTX if we failed; the caller should emit a normal call,
3444 otherwise try to get the result in TARGET, if convenient (and in
3445 mode MODE if that's convenient). */
3446
3447 static rtx
3448 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3449 {
3450 tree dst, src;
3451 location_t loc = EXPR_LOCATION (exp);
3452
3453 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3454 return NULL_RTX;
3455
3456 dst = CALL_EXPR_ARG (exp, 0);
3457 src = CALL_EXPR_ARG (exp, 1);
3458
3459 /* If return value is ignored, transform stpcpy into strcpy. */
3460 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3461 {
3462 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3463 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3464 return expand_expr (result, target, mode, EXPAND_NORMAL);
3465 }
3466 else
3467 {
3468 tree len, lenp1;
3469 rtx ret;
3470
3471 /* Ensure we get an actual string whose length can be evaluated at
3472 compile-time, not an expression containing a string. This is
3473 because the latter will potentially produce pessimized code
3474 when used to produce the return value. */
3475 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3476 return expand_movstr (dst, src, target, /*endp=*/2);
3477
3478 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3479 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3480 target, mode, /*endp=*/2);
3481
3482 if (ret)
3483 return ret;
3484
3485 if (TREE_CODE (len) == INTEGER_CST)
3486 {
3487 rtx len_rtx = expand_normal (len);
3488
3489 if (CONST_INT_P (len_rtx))
3490 {
3491 ret = expand_builtin_strcpy_args (dst, src, target);
3492
3493 if (ret)
3494 {
3495 if (! target)
3496 {
3497 if (mode != VOIDmode)
3498 target = gen_reg_rtx (mode);
3499 else
3500 target = gen_reg_rtx (GET_MODE (ret));
3501 }
3502 if (GET_MODE (target) != GET_MODE (ret))
3503 ret = gen_lowpart (GET_MODE (target), ret);
3504
3505 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3506 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3507 gcc_assert (ret);
3508
3509 return target;
3510 }
3511 }
3512 }
3513
3514 return expand_movstr (dst, src, target, /*endp=*/2);
3515 }
3516 }
3517
3518 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3519 bytes from constant string DATA + OFFSET and return it as target
3520 constant. */
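
/* Illustrative sketch, relying on c_readstr zero-filling past the NUL:
   with DATA = "ab" and a 4-byte MODE, offset 0 yields a constant
   holding { 'a', 'b', 0, 0 }, which provides exactly the trailing
   padding strncpy requires; offsets beyond the NUL yield const0_rtx
   via the check below. */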
3521
3522 rtx
3523 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3524 enum machine_mode mode)
3525 {
3526 const char *str = (const char *) data;
3527
3528 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3529 return const0_rtx;
3530
3531 return c_readstr (str + offset, mode);
3532 }
3533
3534 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3535 NULL_RTX if we failed; the caller should emit a normal call. */
3536
3537 static rtx
3538 expand_builtin_strncpy (tree exp, rtx target)
3539 {
3540 location_t loc = EXPR_LOCATION (exp);
3541
3542 if (validate_arglist (exp,
3543 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3544 {
3545 tree dest = CALL_EXPR_ARG (exp, 0);
3546 tree src = CALL_EXPR_ARG (exp, 1);
3547 tree len = CALL_EXPR_ARG (exp, 2);
3548 tree slen = c_strlen (src, 1);
3549
3550 /* We must be passed a constant len and src parameter. */
3551 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3552 return NULL_RTX;
3553
3554 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3555
3556 /* We're required to pad with trailing zeros if the requested
3557 len is greater than strlen(s2)+1. In that case try to
3558 use store_by_pieces; if it fails, punt. */
3559 if (tree_int_cst_lt (slen, len))
3560 {
3561 unsigned int dest_align = get_pointer_alignment (dest);
3562 const char *p = c_getstr (src);
3563 rtx dest_mem;
3564
3565 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3566 || !can_store_by_pieces (tree_to_uhwi (len),
3567 builtin_strncpy_read_str,
3568 CONST_CAST (char *, p),
3569 dest_align, false))
3570 return NULL_RTX;
3571
3572 dest_mem = get_memory_rtx (dest, len);
3573 store_by_pieces (dest_mem, tree_to_uhwi (len),
3574 builtin_strncpy_read_str,
3575 CONST_CAST (char *, p), dest_align, false, 0);
3576 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3577 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3578 return dest_mem;
3579 }
3580 }
3581 return NULL_RTX;
3582 }
3583
3584 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3585 bytes from constant string DATA + OFFSET and return it as target
3586 constant. */
3587
3588 rtx
3589 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3590 enum machine_mode mode)
3591 {
3592 const char *c = (const char *) data;
3593 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3594
3595 memset (p, *c, GET_MODE_SIZE (mode));
3596
3597 return c_readstr (p, mode);
3598 }
3599
3600 /* Callback routine for store_by_pieces. Return the RTL of a register
3601 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3602 char value given in the RTL register data. For example, if mode is
3603 4 bytes wide, return the RTL for 0x01010101*data. */
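
/* Illustrative: for a 4-byte MODE and a run-time byte value V, the
   code below computes V * 0x01010101, e.g. V == 0x2A yields
   0x2A2A2A2A. */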
3604
3605 static rtx
3606 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3607 enum machine_mode mode)
3608 {
3609 rtx target, coeff;
3610 size_t size;
3611 char *p;
3612
3613 size = GET_MODE_SIZE (mode);
3614 if (size == 1)
3615 return (rtx) data;
3616
3617 p = XALLOCAVEC (char, size);
3618 memset (p, 1, size);
3619 coeff = c_readstr (p, mode);
3620
3621 target = convert_to_mode (mode, (rtx) data, 1);
3622 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3623 return force_reg (mode, target);
3624 }
3625
3626 /* Expand expression EXP, which is a call to the memset builtin. Return
3627 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3628 try to get the result in TARGET, if convenient (and in mode MODE if that's
3629 convenient). */
3630
3631 static rtx
3632 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3633 {
3634 if (!validate_arglist (exp,
3635 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3636 return NULL_RTX;
3637 else
3638 {
3639 tree dest = CALL_EXPR_ARG (exp, 0);
3640 tree val = CALL_EXPR_ARG (exp, 1);
3641 tree len = CALL_EXPR_ARG (exp, 2);
3642 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3643 }
3644 }
3645
3646 /* Helper function to do the actual work for expand_builtin_memset. The
3647 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3648 so that this can also be called without constructing an actual CALL_EXPR.
3649 The other arguments and return value are the same as for
3650 expand_builtin_memset. */
3651
3652 static rtx
3653 expand_builtin_memset_args (tree dest, tree val, tree len,
3654 rtx target, enum machine_mode mode, tree orig_exp)
3655 {
3656 tree fndecl, fn;
3657 enum built_in_function fcode;
3658 enum machine_mode val_mode;
3659 char c;
3660 unsigned int dest_align;
3661 rtx dest_mem, dest_addr, len_rtx;
3662 HOST_WIDE_INT expected_size = -1;
3663 unsigned int expected_align = 0;
3664 unsigned HOST_WIDE_INT min_size;
3665 unsigned HOST_WIDE_INT max_size;
3666 unsigned HOST_WIDE_INT probable_max_size;
3667
3668 dest_align = get_pointer_alignment (dest);
3669
3670 /* If DEST is not a pointer type, don't do this operation in-line. */
3671 if (dest_align == 0)
3672 return NULL_RTX;
3673
3674 if (currently_expanding_gimple_stmt)
3675 stringop_block_profile (currently_expanding_gimple_stmt,
3676 &expected_align, &expected_size);
3677
3678 if (expected_align < dest_align)
3679 expected_align = dest_align;
3680
3681 /* If the LEN parameter is zero, return DEST. */
3682 if (integer_zerop (len))
3683 {
3684 /* Evaluate and ignore VAL in case it has side-effects. */
3685 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3686 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3687 }
3688
3689 /* Stabilize the arguments in case we fail. */
3690 dest = builtin_save_expr (dest);
3691 val = builtin_save_expr (val);
3692 len = builtin_save_expr (len);
3693
3694 len_rtx = expand_normal (len);
3695 determine_block_size (len, len_rtx, &min_size, &max_size,
3696 &probable_max_size);
3697 dest_mem = get_memory_rtx (dest, len);
3698 val_mode = TYPE_MODE (unsigned_char_type_node);
3699
3700 if (TREE_CODE (val) != INTEGER_CST)
3701 {
3702 rtx val_rtx;
3703
3704 val_rtx = expand_normal (val);
3705 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3706
3707 /* Assume that we can memset by pieces if we can store
3708 the coefficients by pieces (in the required modes).
3709 We can't pass builtin_memset_gen_str as that emits RTL. */
3710 c = 1;
3711 if (tree_fits_uhwi_p (len)
3712 && can_store_by_pieces (tree_to_uhwi (len),
3713 builtin_memset_read_str, &c, dest_align,
3714 true))
3715 {
3716 val_rtx = force_reg (val_mode, val_rtx);
3717 store_by_pieces (dest_mem, tree_to_uhwi (len),
3718 builtin_memset_gen_str, val_rtx, dest_align,
3719 true, 0);
3720 }
3721 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3722 dest_align, expected_align,
3723 expected_size, min_size, max_size,
3724 probable_max_size))
3725 goto do_libcall;
3726
3727 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3728 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3729 return dest_mem;
3730 }
3731
3732 if (target_char_cast (val, &c))
3733 goto do_libcall;
3734
3735 if (c)
3736 {
3737 if (tree_fits_uhwi_p (len)
3738 && can_store_by_pieces (tree_to_uhwi (len),
3739 builtin_memset_read_str, &c, dest_align,
3740 true))
3741 store_by_pieces (dest_mem, tree_to_uhwi (len),
3742 builtin_memset_read_str, &c, dest_align, true, 0);
3743 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3744 gen_int_mode (c, val_mode),
3745 dest_align, expected_align,
3746 expected_size, min_size, max_size,
3747 probable_max_size))
3748 goto do_libcall;
3749
3750 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3751 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3752 return dest_mem;
3753 }
3754
3755 set_mem_align (dest_mem, dest_align);
3756 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3757 CALL_EXPR_TAILCALL (orig_exp)
3758 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3759 expected_align, expected_size,
3760 min_size, max_size,
3761 probable_max_size);
3762
3763 if (dest_addr == 0)
3764 {
3765 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3766 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3767 }
3768
3769 return dest_addr;
3770
3771 do_libcall:
3772 fndecl = get_callee_fndecl (orig_exp);
3773 fcode = DECL_FUNCTION_CODE (fndecl);
3774 if (fcode == BUILT_IN_MEMSET)
3775 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3776 dest, val, len);
3777 else if (fcode == BUILT_IN_BZERO)
3778 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3779 dest, len);
3780 else
3781 gcc_unreachable ();
3782 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3783 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3784 return expand_call (fn, target, target == const0_rtx);
3785 }
3786
3787 /* Expand expression EXP, which is a call to the bzero builtin. Return
3788 NULL_RTX if we failed; the caller should emit a normal call. */
3789
3790 static rtx
3791 expand_builtin_bzero (tree exp)
3792 {
3793 tree dest, size;
3794 location_t loc = EXPR_LOCATION (exp);
3795
3796 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3797 return NULL_RTX;
3798
3799 dest = CALL_EXPR_ARG (exp, 0);
3800 size = CALL_EXPR_ARG (exp, 1);
3801
3802 /* New argument list transforming bzero(ptr x, int y) to
3803 memset(ptr x, int 0, size_t y). This is done this way
3804 so that if it isn't expanded inline, we fall back to
3805 calling bzero instead of memset. */
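
/* Illustrative sketch of the transformation (not part of the original
   source):
     bzero (p, n);   =>   memset (p, 0, (size_t) n);
   The return value is discarded, hence const0_rtx as the target. */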
3806
3807 return expand_builtin_memset_args (dest, integer_zero_node,
3808 fold_convert_loc (loc,
3809 size_type_node, size),
3810 const0_rtx, VOIDmode, exp);
3811 }
3812
3813 /* Expand expression EXP, which is a call to the memcmp built-in function.
3814 Return NULL_RTX if we failed and the caller should emit a normal call,
3815 otherwise try to get the result in TARGET, if convenient (and in mode
3816 MODE, if that's convenient). */
3817
3818 static rtx
3819 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3820 ATTRIBUTE_UNUSED enum machine_mode mode)
3821 {
3822 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3823
3824 if (!validate_arglist (exp,
3825 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3826 return NULL_RTX;
3827
3828 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3829 implementing memcmp because it will stop if it encounters two
3830 zero bytes. */
3831 #if defined HAVE_cmpmemsi
3832 {
3833 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3834 rtx result;
3835 rtx insn;
3836 tree arg1 = CALL_EXPR_ARG (exp, 0);
3837 tree arg2 = CALL_EXPR_ARG (exp, 1);
3838 tree len = CALL_EXPR_ARG (exp, 2);
3839
3840 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3841 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3842 enum machine_mode insn_mode;
3843
3844 if (HAVE_cmpmemsi)
3845 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3846 else
3847 return NULL_RTX;
3848
3849 /* If we don't have POINTER_TYPE, call the function. */
3850 if (arg1_align == 0 || arg2_align == 0)
3851 return NULL_RTX;
3852
3853 /* Make a place to write the result of the instruction. */
3854 result = target;
3855 if (! (result != 0
3856 && REG_P (result) && GET_MODE (result) == insn_mode
3857 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3858 result = gen_reg_rtx (insn_mode);
3859
3860 arg1_rtx = get_memory_rtx (arg1, len);
3861 arg2_rtx = get_memory_rtx (arg2, len);
3862 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3863
3864 /* Set MEM_SIZE as appropriate. */
3865 if (CONST_INT_P (arg3_rtx))
3866 {
3867 set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
3868 set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
3869 }
3870
3871 if (HAVE_cmpmemsi)
3872 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3873 GEN_INT (MIN (arg1_align, arg2_align)));
3874 else
3875 gcc_unreachable ();
3876
3877 if (insn)
3878 emit_insn (insn);
3879 else
3880 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
3881 TYPE_MODE (integer_type_node), 3,
3882 XEXP (arg1_rtx, 0), Pmode,
3883 XEXP (arg2_rtx, 0), Pmode,
3884 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
3885 TYPE_UNSIGNED (sizetype)),
3886 TYPE_MODE (sizetype));
3887
3888 /* Return the value in the proper mode for this function. */
3889 mode = TYPE_MODE (TREE_TYPE (exp));
3890 if (GET_MODE (result) == mode)
3891 return result;
3892 else if (target != 0)
3893 {
3894 convert_move (target, result, 0);
3895 return target;
3896 }
3897 else
3898 return convert_to_mode (mode, result, 0);
3899 }
3900 #endif /* HAVE_cmpmemsi. */
3901
3902 return NULL_RTX;
3903 }
3904
3905 /* Expand expression EXP, which is a call to the strcmp builtin. Return
3906 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3907 try to get the result in TARGET, if convenient. */
3908
3909 static rtx
3910 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
3911 {
3912 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3913 return NULL_RTX;
3914
3915 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
3916 if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
3917 || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
3918 {
3919 rtx arg1_rtx, arg2_rtx;
3920 rtx result, insn = NULL_RTX;
3921 tree fndecl, fn;
3922 tree arg1 = CALL_EXPR_ARG (exp, 0);
3923 tree arg2 = CALL_EXPR_ARG (exp, 1);
3924
3925 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3926 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3927
3928 /* If we don't have POINTER_TYPE, call the function. */
3929 if (arg1_align == 0 || arg2_align == 0)
3930 return NULL_RTX;
3931
3932 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
3933 arg1 = builtin_save_expr (arg1);
3934 arg2 = builtin_save_expr (arg2);
3935
3936 arg1_rtx = get_memory_rtx (arg1, NULL);
3937 arg2_rtx = get_memory_rtx (arg2, NULL);
3938
3939 #ifdef HAVE_cmpstrsi
3940 /* Try to call cmpstrsi. */
3941 if (HAVE_cmpstrsi)
3942 {
3943 enum machine_mode insn_mode
3944 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
3945
3946 /* Make a place to write the result of the instruction. */
3947 result = target;
3948 if (! (result != 0
3949 && REG_P (result) && GET_MODE (result) == insn_mode
3950 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3951 result = gen_reg_rtx (insn_mode);
3952
3953 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
3954 GEN_INT (MIN (arg1_align, arg2_align)));
3955 }
3956 #endif
3957 #ifdef HAVE_cmpstrnsi
3958 /* Try to determine at least one length and call cmpstrnsi. */
3959 if (!insn && HAVE_cmpstrnsi)
3960 {
3961 tree len;
3962 rtx arg3_rtx;
3963
3964 enum machine_mode insn_mode
3965 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
3966 tree len1 = c_strlen (arg1, 1);
3967 tree len2 = c_strlen (arg2, 1);
3968
3969 if (len1)
3970 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3971 if (len2)
3972 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3973
3974 /* If we don't have a constant length for the first, use the length
3975 of the second, if we know it. We don't require a constant for
3976 this case; some cost analysis could be done if both are available
3977 but neither is constant. For now, assume they're equally cheap,
3978 unless one has side effects. If both strings have constant lengths,
3979 use the smaller. */
3980
3981 if (!len1)
3982 len = len2;
3983 else if (!len2)
3984 len = len1;
3985 else if (TREE_SIDE_EFFECTS (len1))
3986 len = len2;
3987 else if (TREE_SIDE_EFFECTS (len2))
3988 len = len1;
3989 else if (TREE_CODE (len1) != INTEGER_CST)
3990 len = len2;
3991 else if (TREE_CODE (len2) != INTEGER_CST)
3992 len = len1;
3993 else if (tree_int_cst_lt (len1, len2))
3994 len = len1;
3995 else
3996 len = len2;
3997
3998 /* If both arguments have side effects, we cannot optimize. */
3999 if (!len || TREE_SIDE_EFFECTS (len))
4000 goto do_libcall;
4001
4002 arg3_rtx = expand_normal (len);
4003
4004 /* Make a place to write the result of the instruction. */
4005 result = target;
4006 if (! (result != 0
4007 && REG_P (result) && GET_MODE (result) == insn_mode
4008 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4009 result = gen_reg_rtx (insn_mode);
4010
4011 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4012 GEN_INT (MIN (arg1_align, arg2_align)));
4013 }
4014 #endif
4015
4016 if (insn)
4017 {
4018 enum machine_mode mode;
4019 emit_insn (insn);
4020
4021 /* Return the value in the proper mode for this function. */
4022 mode = TYPE_MODE (TREE_TYPE (exp));
4023 if (GET_MODE (result) == mode)
4024 return result;
4025 if (target == 0)
4026 return convert_to_mode (mode, result, 0);
4027 convert_move (target, result, 0);
4028 return target;
4029 }
4030
4031 /* Expand the library call ourselves using a stabilized argument
4032 list to avoid re-evaluating the function's arguments twice. */
4033 #ifdef HAVE_cmpstrnsi
4034 do_libcall:
4035 #endif
4036 fndecl = get_callee_fndecl (exp);
4037 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4038 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4039 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4040 return expand_call (fn, target, target == const0_rtx);
4041 }
4042 #endif
4043 return NULL_RTX;
4044 }
4045
4046 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4047 NULL_RTX if we failed; the caller should emit a normal call, otherwise
4048 try to get the result in TARGET, if convenient. */
4049
4050 static rtx
4051 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4052 ATTRIBUTE_UNUSED enum machine_mode mode)
4053 {
4054 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4055
4056 if (!validate_arglist (exp,
4057 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4058 return NULL_RTX;
4059
4060 /* If c_strlen can determine an expression for one of the string
4061 lengths, and it doesn't have side effects, then emit cmpstrnsi
4062 using length MIN(strlen(string)+1, arg3). */
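/* Illustrative: for strncmp (s, "abc", n) the expansion below uses
   MIN (4, n) as the cmpstrnsi length, since the comparison cannot
   look past the NUL of the constant string. */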
4063 #ifdef HAVE_cmpstrnsi
4064 if (HAVE_cmpstrnsi)
4065 {
4066 tree len, len1, len2;
4067 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4068 rtx result, insn;
4069 tree fndecl, fn;
4070 tree arg1 = CALL_EXPR_ARG (exp, 0);
4071 tree arg2 = CALL_EXPR_ARG (exp, 1);
4072 tree arg3 = CALL_EXPR_ARG (exp, 2);
4073
4074 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4075 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4076 enum machine_mode insn_mode
4077 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4078
4079 len1 = c_strlen (arg1, 1);
4080 len2 = c_strlen (arg2, 1);
4081
4082 if (len1)
4083 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4084 if (len2)
4085 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4086
4087 /* If we don't have a constant length for the first, use the length
4088 of the second, if we know it. We don't require a constant for
4089 this case; some cost analysis could be done if both are available
4090 but neither is constant. For now, assume they're equally cheap,
4091 unless one has side effects. If both strings have constant lengths,
4092 use the smaller. */
4093
4094 if (!len1)
4095 len = len2;
4096 else if (!len2)
4097 len = len1;
4098 else if (TREE_SIDE_EFFECTS (len1))
4099 len = len2;
4100 else if (TREE_SIDE_EFFECTS (len2))
4101 len = len1;
4102 else if (TREE_CODE (len1) != INTEGER_CST)
4103 len = len2;
4104 else if (TREE_CODE (len2) != INTEGER_CST)
4105 len = len1;
4106 else if (tree_int_cst_lt (len1, len2))
4107 len = len1;
4108 else
4109 len = len2;
4110
4111 /* If both arguments have side effects, we cannot optimize. */
4112 if (!len || TREE_SIDE_EFFECTS (len))
4113 return NULL_RTX;
4114
4115 /* The actual new length parameter is MIN(len,arg3). */
4116 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4117 fold_convert_loc (loc, TREE_TYPE (len), arg3));
4118
4119 /* If we don't have POINTER_TYPE, call the function. */
4120 if (arg1_align == 0 || arg2_align == 0)
4121 return NULL_RTX;
4122
4123 /* Make a place to write the result of the instruction. */
4124 result = target;
4125 if (! (result != 0
4126 && REG_P (result) && GET_MODE (result) == insn_mode
4127 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4128 result = gen_reg_rtx (insn_mode);
4129
4130 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4131 arg1 = builtin_save_expr (arg1);
4132 arg2 = builtin_save_expr (arg2);
4133 len = builtin_save_expr (len);
4134
4135 arg1_rtx = get_memory_rtx (arg1, len);
4136 arg2_rtx = get_memory_rtx (arg2, len);
4137 arg3_rtx = expand_normal (len);
4138 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4139 GEN_INT (MIN (arg1_align, arg2_align)));
4140 if (insn)
4141 {
4142 emit_insn (insn);
4143
4144 /* Return the value in the proper mode for this function. */
4145 mode = TYPE_MODE (TREE_TYPE (exp));
4146 if (GET_MODE (result) == mode)
4147 return result;
4148 if (target == 0)
4149 return convert_to_mode (mode, result, 0);
4150 convert_move (target, result, 0);
4151 return target;
4152 }
4153
4154 /* Expand the library call ourselves using a stabilized argument
4155 list to avoid re-evaluating the function's arguments twice. */
4156 fndecl = get_callee_fndecl (exp);
4157 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4158 arg1, arg2, len);
4159 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4160 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4161 return expand_call (fn, target, target == const0_rtx);
4162 }
4163 #endif
4164 return NULL_RTX;
4165 }
4166
4167 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4168 if that's convenient. */
4169
4170 rtx
4171 expand_builtin_saveregs (void)
4172 {
4173 rtx val, seq;
4174
4175 /* Don't do __builtin_saveregs more than once in a function.
4176 Save the result of the first call and reuse it. */
4177 if (saveregs_value != 0)
4178 return saveregs_value;
4179
4180 /* When this function is called, it means that registers must be
4181 saved on entry to this function. So we migrate the call to the
4182 first insn of this function. */
4183
4184 start_sequence ();
4185
4186 /* Do whatever the machine needs done in this case. */
4187 val = targetm.calls.expand_builtin_saveregs ();
4188
4189 seq = get_insns ();
4190 end_sequence ();
4191
4192 saveregs_value = val;
4193
4194 /* Put the insns after the NOTE that starts the function. If this
4195 is inside a start_sequence, make the outer-level insn chain current, so
4196 the code is placed at the start of the function. */
4197 push_topmost_sequence ();
4198 emit_insn_after (seq, entry_of_function ());
4199 pop_topmost_sequence ();
4200
4201 return val;
4202 }
4203
4204 /* Expand a call to __builtin_next_arg. */
4205
4206 static rtx
4207 expand_builtin_next_arg (void)
4208 {
4209 /* Checking arguments is already done in fold_builtin_next_arg
4210 that must be called before this function. */
4211 return expand_binop (ptr_mode, add_optab,
4212 crtl->args.internal_arg_pointer,
4213 crtl->args.arg_offset_rtx,
4214 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4215 }
4216
4217 /* Make it easier for the backends by protecting the valist argument
4218 from multiple evaluations. */
4219
4220 static tree
4221 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4222 {
4223 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4224
4225 /* The current way of determining the type of valist is completely
4226 bogus. We should have the information on the va builtin instead. */
4227 if (!vatype)
4228 vatype = targetm.fn_abi_va_list (cfun->decl);
4229
4230 if (TREE_CODE (vatype) == ARRAY_TYPE)
4231 {
4232 if (TREE_SIDE_EFFECTS (valist))
4233 valist = save_expr (valist);
4234
4235 /* For this case, the backends will be expecting a pointer to
4236 vatype, but it's possible we've actually been given an array
4237 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4238 So fix it. */
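/* Illustrative, target-specific example: on x86_64, va_list is
   struct __va_list_tag[1]; a local va_list object still has the
   array type here, whereas a va_list function parameter has already
   decayed to a pointer. */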
4239 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4240 {
4241 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4242 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4243 }
4244 }
4245 else
4246 {
4247 tree pt = build_pointer_type (vatype);
4248
4249 if (! needs_lvalue)
4250 {
4251 if (! TREE_SIDE_EFFECTS (valist))
4252 return valist;
4253
4254 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4255 TREE_SIDE_EFFECTS (valist) = 1;
4256 }
4257
4258 if (TREE_SIDE_EFFECTS (valist))
4259 valist = save_expr (valist);
4260 valist = fold_build2_loc (loc, MEM_REF,
4261 vatype, valist, build_int_cst (pt, 0));
4262 }
4263
4264 return valist;
4265 }
4266
4267 /* The "standard" definition of va_list is void*. */
4268
4269 tree
4270 std_build_builtin_va_list (void)
4271 {
4272 return ptr_type_node;
4273 }
4274
4275 /* The "standard" abi va_list is va_list_type_node. */
4276
4277 tree
4278 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4279 {
4280 return va_list_type_node;
4281 }
4282
4283 /* The "standard" type of va_list is va_list_type_node. */
4284
4285 tree
4286 std_canonical_va_list_type (tree type)
4287 {
4288 tree wtype, htype;
4289
4290 if (INDIRECT_REF_P (type))
4291 type = TREE_TYPE (type);
4292 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
4293 type = TREE_TYPE (type);
4294 wtype = va_list_type_node;
4295 htype = type;
4296 /* Treat structure va_list types. */
4297 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4298 htype = TREE_TYPE (htype);
4299 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4300 {
4301 /* If va_list is an array type, the argument may have decayed
4302 to a pointer type, e.g. by being passed to another function.
4303 In that case, unwrap both types so that we can compare the
4304 underlying records. */
4305 if (TREE_CODE (htype) == ARRAY_TYPE
4306 || POINTER_TYPE_P (htype))
4307 {
4308 wtype = TREE_TYPE (wtype);
4309 htype = TREE_TYPE (htype);
4310 }
4311 }
4312 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4313 return va_list_type_node;
4314
4315 return NULL_TREE;
4316 }
4317
4318 /* The "standard" implementation of va_start: just assign `nextarg' to
4319 the variable. */
4320
4321 void
4322 std_expand_builtin_va_start (tree valist, rtx nextarg)
4323 {
4324 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4325 convert_move (va_r, nextarg, 0);
4326 }
4327
4328 /* Expand EXP, a call to __builtin_va_start. */
4329
4330 static rtx
4331 expand_builtin_va_start (tree exp)
4332 {
4333 rtx nextarg;
4334 tree valist;
4335 location_t loc = EXPR_LOCATION (exp);
4336
4337 if (call_expr_nargs (exp) < 2)
4338 {
4339 error_at (loc, "too few arguments to function %<va_start%>");
4340 return const0_rtx;
4341 }
4342
4343 if (fold_builtin_next_arg (exp, true))
4344 return const0_rtx;
4345
4346 nextarg = expand_builtin_next_arg ();
4347 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4348
4349 if (targetm.expand_builtin_va_start)
4350 targetm.expand_builtin_va_start (valist, nextarg);
4351 else
4352 std_expand_builtin_va_start (valist, nextarg);
4353
4354 return const0_rtx;
4355 }
4356
4357 /* Expand EXP, a call to __builtin_va_end. */
4358
4359 static rtx
4360 expand_builtin_va_end (tree exp)
4361 {
4362 tree valist = CALL_EXPR_ARG (exp, 0);
4363
4364 /* Evaluate for side effects, if needed. I hate macros that don't
4365 do that. */
4366 if (TREE_SIDE_EFFECTS (valist))
4367 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4368
4369 return const0_rtx;
4370 }
4371
4372 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4373 builtin rather than just as an assignment in stdarg.h because of the
4374 nastiness of array-type va_list types. */
4375
4376 static rtx
4377 expand_builtin_va_copy (tree exp)
4378 {
4379 tree dst, src, t;
4380 location_t loc = EXPR_LOCATION (exp);
4381
4382 dst = CALL_EXPR_ARG (exp, 0);
4383 src = CALL_EXPR_ARG (exp, 1);
4384
4385 dst = stabilize_va_list_loc (loc, dst, 1);
4386 src = stabilize_va_list_loc (loc, src, 0);
4387
4388 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4389
4390 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4391 {
4392 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4393 TREE_SIDE_EFFECTS (t) = 1;
4394 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4395 }
4396 else
4397 {
4398 rtx dstb, srcb, size;
4399
4400 /* Evaluate to pointers. */
4401 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4402 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4403 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4404 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4405
4406 dstb = convert_memory_address (Pmode, dstb);
4407 srcb = convert_memory_address (Pmode, srcb);
4408
4409 /* "Dereference" to BLKmode memories. */
4410 dstb = gen_rtx_MEM (BLKmode, dstb);
4411 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4412 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4413 srcb = gen_rtx_MEM (BLKmode, srcb);
4414 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4415 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4416
4417 /* Copy. */
4418 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4419 }
4420
4421 return const0_rtx;
4422 }
4423
4424 /* Expand a call to one of the builtin functions __builtin_frame_address or
4425 __builtin_return_address. */
4426
4427 static rtx
4428 expand_builtin_frame_address (tree fndecl, tree exp)
4429 {
4430 /* The argument must be a nonnegative integer constant.
4431 It counts the number of frames to scan up the stack.
4432 The value is the return address saved in that frame. */
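/* Illustrative: __builtin_return_address (0) is the address the
   current function will return to, and __builtin_frame_address (0)
   is the current frame; nonzero counts walk up the call stack and
   are not supported on all targets (hence the warning below). */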
4433 if (call_expr_nargs (exp) == 0)
4434 /* Warning about missing arg was already issued. */
4435 return const0_rtx;
4436 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4437 {
4438 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4439 error ("invalid argument to %<__builtin_frame_address%>");
4440 else
4441 error ("invalid argument to %<__builtin_return_address%>");
4442 return const0_rtx;
4443 }
4444 else
4445 {
4446 rtx tem
4447 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4448 tree_to_uhwi (CALL_EXPR_ARG (exp, 0)));
4449
4450 /* Some ports cannot access arbitrary stack frames. */
4451 if (tem == NULL)
4452 {
4453 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4454 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4455 else
4456 warning (0, "unsupported argument to %<__builtin_return_address%>");
4457 return const0_rtx;
4458 }
4459
4460 /* For __builtin_frame_address, return what we've got. */
4461 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4462 return tem;
4463
4464 if (!REG_P (tem)
4465 && ! CONSTANT_P (tem))
4466 tem = copy_addr_to_reg (tem);
4467 return tem;
4468 }
4469 }
4470
4471 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4472 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4473 is the same as for allocate_dynamic_stack_space. */
4474
4475 static rtx
4476 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4477 {
4478 rtx op0;
4479 rtx result;
4480 bool valid_arglist;
4481 unsigned int align;
4482 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4483 == BUILT_IN_ALLOCA_WITH_ALIGN);
4484
4485 valid_arglist
4486 = (alloca_with_align
4487 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4488 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4489
4490 if (!valid_arglist)
4491 return NULL_RTX;
4492
4493 /* Compute the argument. */
4494 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4495
4496 /* Compute the alignment. */
4497 align = (alloca_with_align
4498 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4499 : BIGGEST_ALIGNMENT);
4500
4501 /* Allocate the desired space. */
4502 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4503 result = convert_memory_address (ptr_mode, result);
4504
4505 return result;
4506 }
4507
4508 /* Expand a call to bswap builtin in EXP.
4509 Return NULL_RTX if a normal call should be emitted rather than expanding the
4510 function in-line. If convenient, the result should be placed in TARGET.
4511 SUBTARGET may be used as the target for computing one of EXP's operands. */
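
/* For example, __builtin_bswap32 (0x12345678) evaluates to
   0x78563412; the expansion below simply routes the operand through
   bswap_optab. */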
4512
4513 static rtx
4514 expand_builtin_bswap (enum machine_mode target_mode, tree exp, rtx target,
4515 rtx subtarget)
4516 {
4517 tree arg;
4518 rtx op0;
4519
4520 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4521 return NULL_RTX;
4522
4523 arg = CALL_EXPR_ARG (exp, 0);
4524 op0 = expand_expr (arg,
4525 subtarget && GET_MODE (subtarget) == target_mode
4526 ? subtarget : NULL_RTX,
4527 target_mode, EXPAND_NORMAL);
4528 if (GET_MODE (op0) != target_mode)
4529 op0 = convert_to_mode (target_mode, op0, 1);
4530
4531 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4532
4533 gcc_assert (target);
4534
4535 return convert_to_mode (target_mode, target, 1);
4536 }
4537
4538 /* Expand a call to a unary builtin in EXP.
4539 Return NULL_RTX if a normal call should be emitted rather than expanding the
4540 function in-line. If convenient, the result should be placed in TARGET.
4541 SUBTARGET may be used as the target for computing one of EXP's operands. */
4542
4543 static rtx
4544 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
4545 rtx subtarget, optab op_optab)
4546 {
4547 rtx op0;
4548
4549 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4550 return NULL_RTX;
4551
4552 /* Compute the argument. */
4553 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4554 (subtarget
4555 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4556 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4557 VOIDmode, EXPAND_NORMAL);
4558 /* Compute op, into TARGET if possible.
4559 Set TARGET to wherever the result comes back. */
4560 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4561 op_optab, op0, target, op_optab != clrsb_optab);
4562 gcc_assert (target);
4563
4564 return convert_to_mode (target_mode, target, 0);
4565 }
4566
4567 /* Expand a call to __builtin_expect. We just return our argument
4568 as the builtin_expect semantics should already have been executed by
4569 the tree branch prediction pass. */
4570
4571 static rtx
4572 expand_builtin_expect (tree exp, rtx target)
4573 {
4574 tree arg;
4575
4576 if (call_expr_nargs (exp) < 2)
4577 return const0_rtx;
4578 arg = CALL_EXPR_ARG (exp, 0);
4579
4580 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4581 /* When guessing was done, the hints should already have been stripped away. */
4582 gcc_assert (!flag_guess_branch_prob
4583 || optimize == 0 || seen_error ());
4584 return target;
4585 }
4586
4587 /* Expand a call to __builtin_assume_aligned. We just return our first
4588 argument, as the builtin_assume_aligned semantics should already have
4589 been executed by CCP. */
4590
4591 static rtx
4592 expand_builtin_assume_aligned (tree exp, rtx target)
4593 {
4594 if (call_expr_nargs (exp) < 2)
4595 return const0_rtx;
4596 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4597 EXPAND_NORMAL);
4598 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4599 && (call_expr_nargs (exp) < 3
4600 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4601 return target;
4602 }
4603
4604 void
4605 expand_builtin_trap (void)
4606 {
4607 #ifdef HAVE_trap
4608 if (HAVE_trap)
4609 {
4610 rtx insn = emit_insn (gen_trap ());
4611 /* For trap insns when not accumulating outgoing args force
4612 REG_ARGS_SIZE note to prevent crossjumping of calls with
4613 different args sizes. */
4614 if (!ACCUMULATE_OUTGOING_ARGS)
4615 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4616 }
4617 else
4618 #endif
4619 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4620 emit_barrier ();
4621 }
4622
4623 /* Expand a call to __builtin_unreachable. We do nothing except emit
4624 a barrier saying that control flow will not pass here.
4625
4626 It is the responsibility of the program being compiled to ensure
4627 that control flow never reaches __builtin_unreachable. */
4628 static void
4629 expand_builtin_unreachable (void)
4630 {
4631 emit_barrier ();
4632 }
4633
4634 /* Expand EXP, a call to fabs, fabsf or fabsl.
4635 Return NULL_RTX if a normal call should be emitted rather than expanding
4636 the function inline. If convenient, the result should be placed
4637 in TARGET. SUBTARGET may be used as the target for computing
4638 the operand. */
4639
4640 static rtx
4641 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4642 {
4643 enum machine_mode mode;
4644 tree arg;
4645 rtx op0;
4646
4647 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4648 return NULL_RTX;
4649
4650 arg = CALL_EXPR_ARG (exp, 0);
4651 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4652 mode = TYPE_MODE (TREE_TYPE (arg));
4653 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4654 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4655 }
4656
4657 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4658 Return NULL if a normal call should be emitted rather than expanding the
4659 function inline. If convenient, the result should be placed in TARGET.
4660 SUBTARGET may be used as the target for computing the operand. */
4661
4662 static rtx
4663 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4664 {
4665 rtx op0, op1;
4666 tree arg;
4667
4668 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4669 return NULL_RTX;
4670
4671 arg = CALL_EXPR_ARG (exp, 0);
4672 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4673
4674 arg = CALL_EXPR_ARG (exp, 1);
4675 op1 = expand_normal (arg);
4676
4677 return expand_copysign (op0, op1, target);
4678 }
4679
4680 /* Create a new constant string literal and return a char* pointer to it.
4681 The STRING_CST value is the LEN characters at STR. */
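/* For example (illustrative): build_string_literal (4, "abc") yields
   the address of a static array holding { 'a', 'b', 'c', '\0' };
   LEN conventionally includes the trailing NUL. */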
4682 tree
4683 build_string_literal (int len, const char *str)
4684 {
4685 tree t, elem, index, type;
4686
4687 t = build_string (len, str);
4688 elem = build_type_variant (char_type_node, 1, 0);
4689 index = build_index_type (size_int (len - 1));
4690 type = build_array_type (elem, index);
4691 TREE_TYPE (t) = type;
4692 TREE_CONSTANT (t) = 1;
4693 TREE_READONLY (t) = 1;
4694 TREE_STATIC (t) = 1;
4695
4696 type = build_pointer_type (elem);
4697 t = build1 (ADDR_EXPR, type,
4698 build4 (ARRAY_REF, elem,
4699 t, integer_zero_node, NULL_TREE, NULL_TREE));
4700 return t;
4701 }
4702
4703 /* Expand a call to __builtin___clear_cache. */
4704
4705 static rtx
4706 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
4707 {
4708 #ifndef HAVE_clear_cache
4709 #ifdef CLEAR_INSN_CACHE
4710 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4711 does something. Just do the default expansion to a call to
4712 __clear_cache(). */
4713 return NULL_RTX;
4714 #else
4715 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4716 does nothing. There is no need to call it. Do nothing. */
4717 return const0_rtx;
4718 #endif /* CLEAR_INSN_CACHE */
4719 #else
4720 /* We have a "clear_cache" insn, and it will handle everything. */
4721 tree begin, end;
4722 rtx begin_rtx, end_rtx;
4723
4724 /* We must not expand to a library call. If we did, any
4725 fallback library function in libgcc that might contain a call to
4726 __builtin___clear_cache() would recurse infinitely. */
4727 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4728 {
4729 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4730 return const0_rtx;
4731 }
4732
4733 if (HAVE_clear_cache)
4734 {
4735 struct expand_operand ops[2];
4736
4737 begin = CALL_EXPR_ARG (exp, 0);
4738 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4739
4740 end = CALL_EXPR_ARG (exp, 1);
4741 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4742
4743 create_address_operand (&ops[0], begin_rtx);
4744 create_address_operand (&ops[1], end_rtx);
4745 if (maybe_expand_insn (CODE_FOR_clear_cache, 2, ops))
4746 return const0_rtx;
4747 }
4748 return const0_rtx;
4749 #endif /* HAVE_clear_cache */
4750 }
4751
4752 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
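
/* Illustrative, assuming a TRAMPOLINE_ALIGNMENT of 128 bits (16
   bytes): the two expand_simple_binop calls below compute
     tramp = (tramp + 15) & -16;
   the usual round-up-to-a-power-of-two idiom. */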
4753
4754 static rtx
4755 round_trampoline_addr (rtx tramp)
4756 {
4757 rtx temp, addend, mask;
4758
4759 /* If we don't need too much alignment, we'll have been guaranteed
4760 proper alignment by get_trampoline_type. */
4761 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4762 return tramp;
4763
4764 /* Round address up to desired boundary. */
4765 temp = gen_reg_rtx (Pmode);
4766 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4767 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4768
4769 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4770 temp, 0, OPTAB_LIB_WIDEN);
4771 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4772 temp, 0, OPTAB_LIB_WIDEN);
4773
4774 return tramp;
4775 }
4776
4777 static rtx
4778 expand_builtin_init_trampoline (tree exp, bool onstack)
4779 {
4780 tree t_tramp, t_func, t_chain;
4781 rtx m_tramp, r_tramp, r_chain, tmp;
4782
4783 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4784 POINTER_TYPE, VOID_TYPE))
4785 return NULL_RTX;
4786
4787 t_tramp = CALL_EXPR_ARG (exp, 0);
4788 t_func = CALL_EXPR_ARG (exp, 1);
4789 t_chain = CALL_EXPR_ARG (exp, 2);
4790
4791 r_tramp = expand_normal (t_tramp);
4792 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4793 MEM_NOTRAP_P (m_tramp) = 1;
4794
4795 /* If ONSTACK, the TRAMP argument should be the address of a field
4796 within the local function's FRAME decl. Either way, let's see if
4797 we can fill in the MEM_ATTRs for this memory. */
4798 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4799 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
4800
4801 /* Creator of a heap trampoline is responsible for making sure the
4802 address is aligned to at least STACK_BOUNDARY. Normally malloc
4803 will ensure this anyhow. */
4804 tmp = round_trampoline_addr (r_tramp);
4805 if (tmp != r_tramp)
4806 {
4807 m_tramp = change_address (m_tramp, BLKmode, tmp);
4808 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4809 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4810 }
4811
4812 /* The FUNC argument should be the address of the nested function.
4813 Extract the actual function decl to pass to the hook. */
4814 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4815 t_func = TREE_OPERAND (t_func, 0);
4816 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4817
4818 r_chain = expand_normal (t_chain);
4819
4820 /* Generate insns to initialize the trampoline. */
4821 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4822
4823 if (onstack)
4824 {
4825 trampolines_created = 1;
4826
4827 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4828 "trampoline generated for nested function %qD", t_func);
4829 }
4830
4831 return const0_rtx;
4832 }
4833
4834 static rtx
4835 expand_builtin_adjust_trampoline (tree exp)
4836 {
4837 rtx tramp;
4838
4839 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4840 return NULL_RTX;
4841
4842 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4843 tramp = round_trampoline_addr (tramp);
4844 if (targetm.calls.trampoline_adjust_address)
4845 tramp = targetm.calls.trampoline_adjust_address (tramp);
4846
4847 return tramp;
4848 }
4849
4850 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4851 function. The function first checks whether the back end provides
4852 an insn to implement signbit for the respective mode. If not, it
4853 checks whether the floating point format of the value is such that
4854 the sign bit can be extracted. If that is not the case, the
4855 function returns NULL_RTX to indicate that a normal call should be
4856 emitted rather than expanding the function in-line. EXP is the
4857 expression that is a call to the builtin function; if convenient,
4858 the result should be placed in TARGET. */
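/* Illustrative sketches of the two fallback paths, assuming IEEE
   formats and an int-sized result:
   - SFmode argument: the sign bit is bit 31, within the result
     mode's lowpart, so a single mask suffices:
       signbit = bits & 0x80000000;
   - DFmode argument on a 64-bit target: the sign bit is bit 63,
     beyond the 32-bit result, so shift first:
       signbit = (bits >> 63) & 1;  */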
4859 static rtx
4860 expand_builtin_signbit (tree exp, rtx target)
4861 {
4862 const struct real_format *fmt;
4863 enum machine_mode fmode, imode, rmode;
4864 tree arg;
4865 int word, bitpos;
4866 enum insn_code icode;
4867 rtx temp;
4868 location_t loc = EXPR_LOCATION (exp);
4869
4870 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4871 return NULL_RTX;
4872
4873 arg = CALL_EXPR_ARG (exp, 0);
4874 fmode = TYPE_MODE (TREE_TYPE (arg));
4875 rmode = TYPE_MODE (TREE_TYPE (exp));
4876 fmt = REAL_MODE_FORMAT (fmode);
4877
4878 arg = builtin_save_expr (arg);
4879
4880 /* Expand the argument, yielding an RTX expression. */
4881 temp = expand_normal (arg);
4882
4883 /* Check if the back end provides an insn that handles signbit for the
4884 argument's mode. */
4885 icode = optab_handler (signbit_optab, fmode);
4886 if (icode != CODE_FOR_nothing)
4887 {
4888 rtx last = get_last_insn ();
4889 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4890 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4891 return target;
4892 delete_insns_since (last);
4893 }
4894
4895 /* For floating point formats without a sign bit, implement signbit
4896 as "ARG < 0.0". */
4897 bitpos = fmt->signbit_ro;
4898 if (bitpos < 0)
4899 {
4900 /* But we can't do this if the format supports signed zero. */
4901 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
4902 return NULL_RTX;
4903
4904 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
4905 build_real (TREE_TYPE (arg), dconst0));
4906 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4907 }
4908
4909 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
4910 {
4911 imode = int_mode_for_mode (fmode);
4912 if (imode == BLKmode)
4913 return NULL_RTX;
4914 temp = gen_lowpart (imode, temp);
4915 }
4916 else
4917 {
4918 imode = word_mode;
4919 /* Handle targets with different FP word orders. */
4920 if (FLOAT_WORDS_BIG_ENDIAN)
4921 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
4922 else
4923 word = bitpos / BITS_PER_WORD;
4924 temp = operand_subword_force (temp, word, fmode);
4925 bitpos = bitpos % BITS_PER_WORD;
4926 }
4927
4928 /* Force the intermediate word_mode (or narrower) result into a
4929 register. This avoids attempting to create paradoxical SUBREGs
4930 of floating point modes below. */
4931 temp = force_reg (imode, temp);
4932
4933 /* If the bitpos is within the "result mode" lowpart, the operation
4934 can be implemented with a single bitwise AND. Otherwise, we need
4935 a right shift and an AND. */
4936
4937 if (bitpos < GET_MODE_BITSIZE (rmode))
4938 {
4939 double_int mask = double_int_zero.set_bit (bitpos);
4940
4941 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
4942 temp = gen_lowpart (rmode, temp);
4943 temp = expand_binop (rmode, and_optab, temp,
4944 immed_double_int_const (mask, rmode),
4945 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4946 }
4947 else
4948 {
4949 /* Perform a logical right shift to place the signbit in the least
4950 significant bit, then truncate the result to the desired mode
4951 and mask just this bit. */
4952 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
4953 temp = gen_lowpart (rmode, temp);
4954 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
4955 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4956 }
4957
4958 return temp;
4959 }
4960
4961 /* Expand fork or exec calls. TARGET is the desired target of the
4962 call. EXP is the call. FN is the decl of the
4963 actual function. IGNORE is nonzero if the
4964 value is to be ignored. */
4965
4966 static rtx
4967 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
4968 {
4969 tree id, decl;
4970 tree call;
4971
4972 /* If we are not profiling, just call the function. */
4973 if (!profile_arc_flag)
4974 return NULL_RTX;
4975
4976 /* Otherwise call the wrapper. This should be equivalent for the rest of
4977 the compiler, so the code does not diverge, and the wrapper may run the
4978 code necessary for keeping the profiling sane. */
4979
4980 switch (DECL_FUNCTION_CODE (fn))
4981 {
4982 case BUILT_IN_FORK:
4983 id = get_identifier ("__gcov_fork");
4984 break;
4985
4986 case BUILT_IN_EXECL:
4987 id = get_identifier ("__gcov_execl");
4988 break;
4989
4990 case BUILT_IN_EXECV:
4991 id = get_identifier ("__gcov_execv");
4992 break;
4993
4994 case BUILT_IN_EXECLP:
4995 id = get_identifier ("__gcov_execlp");
4996 break;
4997
4998 case BUILT_IN_EXECLE:
4999 id = get_identifier ("__gcov_execle");
5000 break;
5001
5002 case BUILT_IN_EXECVP:
5003 id = get_identifier ("__gcov_execvp");
5004 break;
5005
5006 case BUILT_IN_EXECVE:
5007 id = get_identifier ("__gcov_execve");
5008 break;
5009
5010 default:
5011 gcc_unreachable ();
5012 }
5013
5014 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5015 FUNCTION_DECL, id, TREE_TYPE (fn));
5016 DECL_EXTERNAL (decl) = 1;
5017 TREE_PUBLIC (decl) = 1;
5018 DECL_ARTIFICIAL (decl) = 1;
5019 TREE_NOTHROW (decl) = 1;
5020 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5021 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5022 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5023 return expand_call (call, target, ignore);
5024 }
5025
5026
5027 \f
5028 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5029 the pointer in these functions is void*, the tree optimizers may remove
5030 casts. The mode computed in expand_builtin isn't reliable either, due
5031 to __sync_bool_compare_and_swap.
5032
5033 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5034 group of builtins. This gives us log2 of the mode size. */
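
/* Illustrative mapping, assuming the usual integer modes:
     fcode_diff 0 -> QImode (1 byte)    fcode_diff 3 -> DImode (8)
     fcode_diff 1 -> HImode (2 bytes)   fcode_diff 4 -> TImode (16)
     fcode_diff 2 -> SImode (4 bytes)  */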
5035
5036 static inline enum machine_mode
5037 get_builtin_sync_mode (int fcode_diff)
5038 {
5039 /* The size is not negotiable, so ask not to get BLKmode in return
5040 if the target indicates that a smaller size would be better. */
5041 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5042 }
5043
5044 /* Expand the memory expression LOC and return the appropriate memory operand
5045 for the builtin_sync operations. */
5046
5047 static rtx
5048 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5049 {
5050 rtx addr, mem;
5051
5052 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5053 addr = convert_memory_address (Pmode, addr);
5054
5055 /* Note that we explicitly do not want any alias information for this
5056 memory, so that we kill all other live memories. Otherwise we don't
5057 satisfy the full barrier semantics of the intrinsic. */
5058 mem = validize_mem (gen_rtx_MEM (mode, addr));
5059
5060 /* The alignment needs to be at least that of the mode. */
5061 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5062 get_pointer_alignment (loc)));
5063 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5064 MEM_VOLATILE_P (mem) = 1;
5065
5066 return mem;
5067 }
5068
5069 /* Make sure an argument is in the right mode.
5070 EXP is the tree argument.
5071 MODE is the mode it should be in. */
5072
5073 static rtx
5074 expand_expr_force_mode (tree exp, enum machine_mode mode)
5075 {
5076 rtx val;
5077 enum machine_mode old_mode;
5078
5079 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5080 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5081 of CONST_INTs, where we know the old_mode only from the call argument. */
5082
5083 old_mode = GET_MODE (val);
5084 if (old_mode == VOIDmode)
5085 old_mode = TYPE_MODE (TREE_TYPE (exp));
5086 val = convert_modes (mode, old_mode, val, 1);
5087 return val;
5088 }
5089
5090
5091 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5092 EXP is the CALL_EXPR. CODE is the rtx code
5093 that corresponds to the arithmetic or logical operation from the name;
5094 an exception here is that NOT actually means NAND. TARGET is an optional
5095 place for us to store the results; AFTER is true if this is the
5096 fetch_and_xxx form. */
5097
5098 static rtx
5099 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5100 enum rtx_code code, bool after,
5101 rtx target)
5102 {
5103 rtx val, mem;
5104 location_t loc = EXPR_LOCATION (exp);
5105
5106 if (code == NOT && warn_sync_nand)
5107 {
5108 tree fndecl = get_callee_fndecl (exp);
5109 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5110
5111 static bool warned_f_a_n, warned_n_a_f;
5112
5113 switch (fcode)
5114 {
5115 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5116 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5117 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5118 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5119 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5120 if (warned_f_a_n)
5121 break;
5122
5123 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5124 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5125 warned_f_a_n = true;
5126 break;
5127
5128 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5129 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5130 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5131 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5132 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5133 if (warned_n_a_f)
5134 break;
5135
5136 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5137 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5138 warned_n_a_f = true;
5139 break;
5140
5141 default:
5142 gcc_unreachable ();
5143 }
5144 }
5145
5146 /* Expand the operands. */
5147 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5148 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5149
5150 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SEQ_CST,
5151 after);
5152 }
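
/* The AFTER flag, illustrated (a sketch, not from this file):

     old_val = __sync_fetch_and_add (&x, n);   // AFTER == false: old value
     new_val = __sync_add_and_fetch (&x, n);   // AFTER == true:  new value

   Both forms expand to the same atomic read-modify-write;
   expand_atomic_fetch_op only decides which of the two values is
   returned.  */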
5153
5154 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5155 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5156 true if this is the boolean form. TARGET is a place for us to store the
5157 results; this is NOT optional if IS_BOOL is true. */
5158
5159 static rtx
5160 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5161 bool is_bool, rtx target)
5162 {
5163 rtx old_val, new_val, mem;
5164 rtx *pbool, *poval;
5165
5166 /* Expand the operands. */
5167 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5168 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5169 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5170
5171 pbool = poval = NULL;
5172 if (target != const0_rtx)
5173 {
5174 if (is_bool)
5175 pbool = &target;
5176 else
5177 poval = &target;
5178 }
5179 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5180 false, MEMMODEL_SEQ_CST,
5181 MEMMODEL_SEQ_CST))
5182 return NULL_RTX;
5183
5184 return target;
5185 }
5186
5187 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5188 general form is actually an atomic exchange, and some targets only
5189 support a reduced form with the second argument being a constant 1.
5190 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5191 the results. */
5192
5193 static rtx
5194 expand_builtin_sync_lock_test_and_set (enum machine_mode mode, tree exp,
5195 rtx target)
5196 {
5197 rtx val, mem;
5198
5199 /* Expand the operands. */
5200 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5201 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5202
5203 return expand_sync_lock_test_and_set (target, mem, val);
5204 }
5205
5206 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5207
5208 static void
5209 expand_builtin_sync_lock_release (enum machine_mode mode, tree exp)
5210 {
5211 rtx mem;
5212
5213 /* Expand the operands. */
5214 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5215
5216 expand_atomic_store (mem, const0_rtx, MEMMODEL_RELEASE, true);
5217 }
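
/* Rough source-level equivalence for the expansion above (illustrative):

     __sync_lock_release (&lock);
     __atomic_store_n (&lock, 0, __ATOMIC_RELEASE);   // behaves alike

   i.e. a release-ordered store of zero, which is exactly the
   expand_atomic_store call with const0_rtx and MEMMODEL_RELEASE.  */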
5218
5219 /* Given an integer representing an ``enum memmodel'', verify its
5220 correctness and return the memory model enum. */
5221
5222 static enum memmodel
5223 get_memmodel (tree exp)
5224 {
5225 rtx op;
5226 unsigned HOST_WIDE_INT val;
5227
5228 /* If the parameter is not a constant, it's a run-time value, so we'll just
5229 convert it to MEMMODEL_SEQ_CST to avoid annoying run-time checking. */
5230 if (TREE_CODE (exp) != INTEGER_CST)
5231 return MEMMODEL_SEQ_CST;
5232
5233 op = expand_normal (exp);
5234
5235 val = INTVAL (op);
5236 if (targetm.memmodel_check)
5237 val = targetm.memmodel_check (val);
5238 else if (val & ~MEMMODEL_MASK)
5239 {
5240 warning (OPT_Winvalid_memory_model,
5241 "unknown architecture specifier in memory model to builtin");
5242 return MEMMODEL_SEQ_CST;
5243 }
5244
5245 if ((INTVAL (op) & MEMMODEL_MASK) >= MEMMODEL_LAST)
5246 {
5247 warning (OPT_Winvalid_memory_model,
5248 "invalid memory model argument to builtin");
5249 return MEMMODEL_SEQ_CST;
5250 }
5251
5252 return (enum memmodel) val;
5253 }
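
/* Example of the two paths (a sketch): in

     __atomic_load_n (p, __ATOMIC_ACQUIRE);

   the model argument is an INTEGER_CST, so get_memmodel returns
   MEMMODEL_ACQUIRE directly; if the argument were a run-time variable,
   the code above would conservatively fall back to MEMMODEL_SEQ_CST.  */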
5254
5255 /* Expand the __atomic_exchange intrinsic:
5256 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5257 EXP is the CALL_EXPR.
5258 TARGET is an optional place for us to store the results. */
5259
5260 static rtx
5261 expand_builtin_atomic_exchange (enum machine_mode mode, tree exp, rtx target)
5262 {
5263 rtx val, mem;
5264 enum memmodel model;
5265
5266 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5267 if ((model & MEMMODEL_MASK) == MEMMODEL_CONSUME)
5268 {
5269 error ("invalid memory model for %<__atomic_exchange%>");
5270 return NULL_RTX;
5271 }
5272
5273 if (!flag_inline_atomics)
5274 return NULL_RTX;
5275
5276 /* Expand the operands. */
5277 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5278 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5279
5280 return expand_atomic_exchange (target, mem, val, model);
5281 }
5282
5283 /* Expand the __atomic_compare_exchange intrinsic:
5284 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5285 TYPE desired, BOOL weak,
5286 enum memmodel success,
5287 enum memmodel failure)
5288 EXP is the CALL_EXPR.
5289 TARGET is an optional place for us to store the results. */
5290
5291 static rtx
5292 expand_builtin_atomic_compare_exchange (enum machine_mode mode, tree exp,
5293 rtx target)
5294 {
5295 rtx expect, desired, mem, oldval;
5296 enum memmodel success, failure;
5297 tree weak;
5298 bool is_weak;
5299
5300 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5301 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5302
5303 if ((failure & MEMMODEL_MASK) == MEMMODEL_RELEASE
5304 || (failure & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5305 {
5306 error ("invalid failure memory model for %<__atomic_compare_exchange%>");
5307 return NULL_RTX;
5308 }
5309
5310 if (failure > success)
5311 {
5312 error ("failure memory model cannot be stronger than success "
5313 "memory model for %<__atomic_compare_exchange%>");
5314 return NULL_RTX;
5315 }
5316
5317 if (!flag_inline_atomics)
5318 return NULL_RTX;
5319
5320 /* Expand the operands. */
5321 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5322
5323 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5324 expect = convert_memory_address (Pmode, expect);
5325 expect = gen_rtx_MEM (mode, expect);
5326 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5327
5328 weak = CALL_EXPR_ARG (exp, 3);
5329 is_weak = false;
5330 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5331 is_weak = true;
5332
5333 oldval = expect;
5334 if (!expand_atomic_compare_and_swap ((target == const0_rtx ? NULL : &target),
5335 &oldval, mem, oldval, desired,
5336 is_weak, success, failure))
5337 return NULL_RTX;
5338
5339 if (oldval != expect)
5340 emit_move_insn (expect, oldval);
5341
5342 return target;
5343 }
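
/* Source-level view of the write-back above (illustrative):

     bool ok = __atomic_compare_exchange_n (&v, &expected, desired, 0,
                                            __ATOMIC_SEQ_CST,
                                            __ATOMIC_SEQ_CST);

   on failure, the value observed in the object is stored back into
   EXPECTED, which is what the emit_move_insn (expect, oldval)
   implements.  */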
5344
5345 /* Expand the __atomic_load intrinsic:
5346 TYPE __atomic_load (TYPE *object, enum memmodel)
5347 EXP is the CALL_EXPR.
5348 TARGET is an optional place for us to store the results. */
5349
5350 static rtx
5351 expand_builtin_atomic_load (enum machine_mode mode, tree exp, rtx target)
5352 {
5353 rtx mem;
5354 enum memmodel model;
5355
5356 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5357 if ((model & MEMMODEL_MASK) == MEMMODEL_RELEASE
5358 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5359 {
5360 error ("invalid memory model for %<__atomic_load%>");
5361 return NULL_RTX;
5362 }
5363
5364 if (!flag_inline_atomics)
5365 return NULL_RTX;
5366
5367 /* Expand the operand. */
5368 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5369
5370 return expand_atomic_load (target, mem, model);
5371 }
5372
5373
5374 /* Expand the __atomic_store intrinsic:
5375 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5376 EXP is the CALL_EXPR.
5377 TARGET is an optional place for us to store the results. */
5378
5379 static rtx
5380 expand_builtin_atomic_store (enum machine_mode mode, tree exp)
5381 {
5382 rtx mem, val;
5383 enum memmodel model;
5384
5385 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5386 if ((model & MEMMODEL_MASK) != MEMMODEL_RELAXED
5387 && (model & MEMMODEL_MASK) != MEMMODEL_SEQ_CST
5388 && (model & MEMMODEL_MASK) != MEMMODEL_RELEASE)
5389 {
5390 error ("invalid memory model for %<__atomic_store%>");
5391 return NULL_RTX;
5392 }
5393
5394 if (!flag_inline_atomics)
5395 return NULL_RTX;
5396
5397 /* Expand the operands. */
5398 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5399 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5400
5401 return expand_atomic_store (mem, val, model, false);
5402 }
5403
5404 /* Expand the __atomic_fetch_XXX intrinsic:
5405 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5406 EXP is the CALL_EXPR.
5407 TARGET is an optional place for us to store the results.
5408 CODE is the operation: PLUS, MINUS, AND, XOR, or IOR; NOT means NAND.
5409 FETCH_AFTER is true if the result after the operation is returned,
5410 and false if the value before the operation is returned.
5411 IGNORE is true if the result is not used.
5412 EXT_CALL is the correct builtin for an external call if this cannot be
5413 resolved to an instruction sequence. */
5414
5415 static rtx
5416 expand_builtin_atomic_fetch_op (enum machine_mode mode, tree exp, rtx target,
5417 enum rtx_code code, bool fetch_after,
5418 bool ignore, enum built_in_function ext_call)
5419 {
5420 rtx val, mem, ret;
5421 enum memmodel model;
5422 tree fndecl;
5423 tree addr;
5424
5425 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5426
5427 /* Expand the operands. */
5428 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5429 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5430
5431 /* Only try generating instructions if inlining is turned on. */
5432 if (flag_inline_atomics)
5433 {
5434 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5435 if (ret)
5436 return ret;
5437 }
5438
5439 /* Return if a different routine isn't needed for the library call. */
5440 if (ext_call == BUILT_IN_NONE)
5441 return NULL_RTX;
5442
5443 /* Change the call to the specified function. */
5444 fndecl = get_callee_fndecl (exp);
5445 addr = CALL_EXPR_FN (exp);
5446 STRIP_NOPS (addr);
5447
5448 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5449 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
5450
5451 /* Expand the call here so we can emit trailing code. */
5452 ret = expand_call (exp, target, ignore);
5453
5454 /* Replace the original function just in case it matters. */
5455 TREE_OPERAND (addr, 0) = fndecl;
5456
5457 /* Then issue the arithmetic correction to return the right result. */
5458 if (!ignore)
5459 {
5460 if (code == NOT)
5461 {
5462 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5463 OPTAB_LIB_WIDEN);
5464 ret = expand_simple_unop (mode, NOT, ret, target, true);
5465 }
5466 else
5467 ret = expand_simple_binop (mode, code, ret, val, target, true,
5468 OPTAB_LIB_WIDEN);
5469 }
5470 return ret;
5471 }
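
/* Worked example of the trailing correction (a sketch): if
   __atomic_add_fetch_4 falls back to the library __atomic_fetch_add_4,
   the call returns the OLD value, so the new value is recomputed as

     ret = ret + val;

   and for NAND (CODE == NOT) as

     ret = ~(ret & val);

   matching the two expand_simple_* sequences above.  */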
5472
5473
5474 #ifndef HAVE_atomic_clear
5475 # define HAVE_atomic_clear 0
5476 # define gen_atomic_clear(x,y) (gcc_unreachable (), NULL_RTX)
5477 #endif
5478
5479 /* Expand an atomic clear operation.
5480 void __atomic_clear (BOOL *obj, enum memmodel)
5481 EXP is the call expression. */
5482
5483 static rtx
5484 expand_builtin_atomic_clear (tree exp)
5485 {
5486 enum machine_mode mode;
5487 rtx mem, ret;
5488 enum memmodel model;
5489
5490 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5491 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5492 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5493
5494 if ((model & MEMMODEL_MASK) == MEMMODEL_ACQUIRE
5495 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5496 {
5497 error ("invalid memory model for %<__atomic_store%>");
5498 return const0_rtx;
5499 }
5500
5501 if (HAVE_atomic_clear)
5502 {
5503 emit_insn (gen_atomic_clear (mem, model));
5504 return const0_rtx;
5505 }
5506
5507 /* Try issuing an __atomic_store; expand_atomic_store falls back to a
5508 __sync_lock_release-style store if needed. The only way this can fail
5509 is if the bool type is larger than a word size. Unlikely, but
5510 handle it anyway for completeness. Assume a single-threaded model since
5511 there is no atomic support in this case, and no barriers are required. */
5512 ret = expand_atomic_store (mem, const0_rtx, model, true);
5513 if (!ret)
5514 emit_move_insn (mem, const0_rtx);
5515 return const0_rtx;
5516 }
5517
5518 /* Expand an atomic test_and_set operation.
5519 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
5520 EXP is the call expression. */
5521
5522 static rtx
5523 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5524 {
5525 rtx mem;
5526 enum memmodel model;
5527 enum machine_mode mode;
5528
5529 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5530 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5531 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5532
5533 return expand_atomic_test_and_set (target, mem, model);
5534 }
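
/* Illustrative use of this pair of builtins (not from this file):
   a minimal spinlock over a char-sized flag.

     static char flag;

     while (__atomic_test_and_set (&flag, __ATOMIC_ACQUIRE))
       ;                                  // spin until we set it first
     ... critical section ...
     __atomic_clear (&flag, __ATOMIC_RELEASE);
*/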
5535
5536
5537 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5538 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5539
5540 static tree
5541 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5542 {
5543 int size;
5544 enum machine_mode mode;
5545 unsigned int mode_align, type_align;
5546
5547 if (TREE_CODE (arg0) != INTEGER_CST)
5548 return NULL_TREE;
5549
5550 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5551 mode = mode_for_size (size, MODE_INT, 0);
5552 mode_align = GET_MODE_ALIGNMENT (mode);
5553
5554 if (TREE_CODE (arg1) == INTEGER_CST && INTVAL (expand_normal (arg1)) == 0)
5555 type_align = mode_align;
5556 else
5557 {
5558 tree ttype = TREE_TYPE (arg1);
5559
5560 /* This function is usually invoked and folded immediately by the front
5561 end before anything else has a chance to look at it. The pointer
5562 parameter at this point is usually cast to a void *, so check for that
5563 and look past the cast. */
5564 if (TREE_CODE (arg1) == NOP_EXPR && POINTER_TYPE_P (ttype)
5565 && VOID_TYPE_P (TREE_TYPE (ttype)))
5566 arg1 = TREE_OPERAND (arg1, 0);
5567
5568 ttype = TREE_TYPE (arg1);
5569 gcc_assert (POINTER_TYPE_P (ttype));
5570
5571 /* Get the underlying type of the object. */
5572 ttype = TREE_TYPE (ttype);
5573 type_align = TYPE_ALIGN (ttype);
5574 }
5575
5576 /* If the object has smaller alignment, the lock-free routines cannot
5577 be used. */
5578 if (type_align < mode_align)
5579 return boolean_false_node;
5580
5581 /* Check if a compare_and_swap pattern exists for the mode which represents
5582 the required size. The pattern is not allowed to fail, so the existence
5583 of the pattern indicates support is present. */
5584 if (can_compare_and_swap_p (mode, true))
5585 return boolean_true_node;
5586 else
5587 return boolean_false_node;
5588 }
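
/* Worked example (target-dependent; stated only as an assumption): on a
   target with a 32-bit compare-and-swap pattern,

     __atomic_always_lock_free (4, 0);

   takes the ARG1 == 0 path, so type_align == mode_align, and
   can_compare_and_swap_p (SImode, true) lets the call fold to true at
   compile time.  */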
5589
5590 /* Return true if the parameters to call EXP represent an object which will
5591 always generate lock free instructions. The first argument represents the
5592 size of the object, and the second parameter is a pointer to the object
5593 itself. If NULL is passed for the object, then the result is based on
5594 typical alignment for an object of the specified size. Otherwise return
5595 false. */
5596
5597 static rtx
5598 expand_builtin_atomic_always_lock_free (tree exp)
5599 {
5600 tree size;
5601 tree arg0 = CALL_EXPR_ARG (exp, 0);
5602 tree arg1 = CALL_EXPR_ARG (exp, 1);
5603
5604 if (TREE_CODE (arg0) != INTEGER_CST)
5605 {
5606 error ("non-constant argument 1 to __atomic_always_lock_free");
5607 return const0_rtx;
5608 }
5609
5610 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5611 if (size == boolean_true_node)
5612 return const1_rtx;
5613 return const0_rtx;
5614 }
5615
5616 /* Return true if it can be determined that the object ARG1 of size ARG0
5617 is lock free on this architecture; otherwise return NULL_TREE. */
5618
5619 static tree
5620 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5621 {
5622 if (!flag_inline_atomics)
5623 return NULL_TREE;
5624
5625 /* If it is always lock free, fold to true; otherwise don't fold. */
5626 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5627 return boolean_true_node;
5628
5629 return NULL_TREE;
5630 }
5631
5632 /* Return true if the parameters to call EXP represent an object which will
5633 always generate lock free instructions. The first argument represents the
5634 size of the object, and the second parameter is a pointer to the object
5635 itself. If NULL is passed for the object, then the result is based on
5636 typical alignment for an object of the specified size. Otherwise return
5637 NULL. */
5638
5639 static rtx
5640 expand_builtin_atomic_is_lock_free (tree exp)
5641 {
5642 tree size;
5643 tree arg0 = CALL_EXPR_ARG (exp, 0);
5644 tree arg1 = CALL_EXPR_ARG (exp, 1);
5645
5646 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5647 {
5648 error ("non-integer argument 1 to __atomic_is_lock_free");
5649 return NULL_RTX;
5650 }
5651
5652 if (!flag_inline_atomics)
5653 return NULL_RTX;
5654
5655 /* If the value is known at compile time, return the RTX for it. */
5656 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5657 if (size == boolean_true_node)
5658 return const1_rtx;
5659
5660 return NULL_RTX;
5661 }
5662
5663 /* Expand the __atomic_thread_fence intrinsic:
5664 void __atomic_thread_fence (enum memmodel)
5665 EXP is the CALL_EXPR. */
5666
5667 static void
5668 expand_builtin_atomic_thread_fence (tree exp)
5669 {
5670 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5671 expand_mem_thread_fence (model);
5672 }
5673
5674 /* Expand the __atomic_signal_fence intrinsic:
5675 void __atomic_signal_fence (enum memmodel)
5676 EXP is the CALL_EXPR. */
5677
5678 static void
5679 expand_builtin_atomic_signal_fence (tree exp)
5680 {
5681 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5682 expand_mem_signal_fence (model);
5683 }
5684
5685 /* Expand the __sync_synchronize intrinsic. */
5686
5687 static void
5688 expand_builtin_sync_synchronize (void)
5689 {
5690 expand_mem_thread_fence (MEMMODEL_SEQ_CST);
5691 }
5692
5693 static rtx
5694 expand_builtin_thread_pointer (tree exp, rtx target)
5695 {
5696 enum insn_code icode;
5697 if (!validate_arglist (exp, VOID_TYPE))
5698 return const0_rtx;
5699 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5700 if (icode != CODE_FOR_nothing)
5701 {
5702 struct expand_operand op;
5703 if (!REG_P (target) || GET_MODE (target) != Pmode)
5704 target = gen_reg_rtx (Pmode);
5705 create_output_operand (&op, target, Pmode);
5706 expand_insn (icode, 1, &op);
5707 return target;
5708 }
5709 error ("__builtin_thread_pointer is not supported on this target");
5710 return const0_rtx;
5711 }
5712
5713 static void
5714 expand_builtin_set_thread_pointer (tree exp)
5715 {
5716 enum insn_code icode;
5717 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5718 return;
5719 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5720 if (icode != CODE_FOR_nothing)
5721 {
5722 struct expand_operand op;
5723 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5724 Pmode, EXPAND_NORMAL);
5725 create_input_operand (&op, val, Pmode);
5726 expand_insn (icode, 1, &op);
5727 return;
5728 }
5729 error ("__builtin_set_thread_pointer is not supported on this target");
5730 }
5731
5732 \f
5733 /* Emit code to restore a previously saved value of the stack pointer. */
5734
5735 static void
5736 expand_stack_restore (tree var)
5737 {
5738 rtx prev, sa = expand_normal (var);
5739
5740 sa = convert_memory_address (Pmode, sa);
5741
5742 prev = get_last_insn ();
5743 emit_stack_restore (SAVE_BLOCK, sa);
5744 fixup_args_size_notes (prev, get_last_insn (), 0);
5745 }
5746
5747
5748 /* Emit code to save the current value of the stack pointer. */
5749
5750 static rtx
5751 expand_stack_save (void)
5752 {
5753 rtx ret = NULL_RTX;
5754
5755 do_pending_stack_adjust ();
5756 emit_stack_save (SAVE_BLOCK, &ret);
5757 return ret;
5758 }
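
/* These two builtins are paired by the gimplifier around variable-sized
   objects rather than written by hand; conceptually (pseudocode):

     sp = __builtin_stack_save ();     // expand_stack_save
     char vla[n];                      // storage carved from the stack
     ...
     __builtin_stack_restore (sp);     // releases the VLA on scope exit
*/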
5759
5760 /* Expand an expression EXP that calls a built-in function,
5761 with result going to TARGET if that's convenient
5762 (and in mode MODE if that's convenient).
5763 SUBTARGET may be used as the target for computing one of EXP's operands.
5764 IGNORE is nonzero if the value is to be ignored. */
5765
5766 rtx
5767 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5768 int ignore)
5769 {
5770 tree fndecl = get_callee_fndecl (exp);
5771 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5772 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5773 int flags;
5774
5775 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5776 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5777
5778 /* When not optimizing, generate calls to library functions for a certain
5779 set of builtins. */
5780 if (!optimize
5781 && !called_as_built_in (fndecl)
5782 && fcode != BUILT_IN_FORK
5783 && fcode != BUILT_IN_EXECL
5784 && fcode != BUILT_IN_EXECV
5785 && fcode != BUILT_IN_EXECLP
5786 && fcode != BUILT_IN_EXECLE
5787 && fcode != BUILT_IN_EXECVP
5788 && fcode != BUILT_IN_EXECVE
5789 && fcode != BUILT_IN_ALLOCA
5790 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5791 && fcode != BUILT_IN_FREE)
5792 return expand_call (exp, target, ignore);
5793
5794 /* The built-in function expanders test for target == const0_rtx
5795 to determine whether the function's result will be ignored. */
5796 if (ignore)
5797 target = const0_rtx;
5798
5799 /* If the result of a pure or const built-in function is ignored, and
5800 none of its arguments are volatile, we can avoid expanding the
5801 built-in call and just evaluate the arguments for side-effects. */
5802 if (target == const0_rtx
5803 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5804 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5805 {
5806 bool volatilep = false;
5807 tree arg;
5808 call_expr_arg_iterator iter;
5809
5810 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5811 if (TREE_THIS_VOLATILE (arg))
5812 {
5813 volatilep = true;
5814 break;
5815 }
5816
5817 if (! volatilep)
5818 {
5819 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5820 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5821 return const0_rtx;
5822 }
5823 }
5824
5825 switch (fcode)
5826 {
5827 CASE_FLT_FN (BUILT_IN_FABS):
5828 case BUILT_IN_FABSD32:
5829 case BUILT_IN_FABSD64:
5830 case BUILT_IN_FABSD128:
5831 target = expand_builtin_fabs (exp, target, subtarget);
5832 if (target)
5833 return target;
5834 break;
5835
5836 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5837 target = expand_builtin_copysign (exp, target, subtarget);
5838 if (target)
5839 return target;
5840 break;
5841
5842 /* Just do a normal library call if we were unable to fold
5843 the values. */
5844 CASE_FLT_FN (BUILT_IN_CABS):
5845 break;
5846
5847 CASE_FLT_FN (BUILT_IN_EXP):
5848 CASE_FLT_FN (BUILT_IN_EXP10):
5849 CASE_FLT_FN (BUILT_IN_POW10):
5850 CASE_FLT_FN (BUILT_IN_EXP2):
5851 CASE_FLT_FN (BUILT_IN_EXPM1):
5852 CASE_FLT_FN (BUILT_IN_LOGB):
5853 CASE_FLT_FN (BUILT_IN_LOG):
5854 CASE_FLT_FN (BUILT_IN_LOG10):
5855 CASE_FLT_FN (BUILT_IN_LOG2):
5856 CASE_FLT_FN (BUILT_IN_LOG1P):
5857 CASE_FLT_FN (BUILT_IN_TAN):
5858 CASE_FLT_FN (BUILT_IN_ASIN):
5859 CASE_FLT_FN (BUILT_IN_ACOS):
5860 CASE_FLT_FN (BUILT_IN_ATAN):
5861 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5862 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5863 because of possible accuracy problems. */
5864 if (! flag_unsafe_math_optimizations)
5865 break;
5866 CASE_FLT_FN (BUILT_IN_SQRT):
5867 CASE_FLT_FN (BUILT_IN_FLOOR):
5868 CASE_FLT_FN (BUILT_IN_CEIL):
5869 CASE_FLT_FN (BUILT_IN_TRUNC):
5870 CASE_FLT_FN (BUILT_IN_ROUND):
5871 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5872 CASE_FLT_FN (BUILT_IN_RINT):
5873 target = expand_builtin_mathfn (exp, target, subtarget);
5874 if (target)
5875 return target;
5876 break;
5877
5878 CASE_FLT_FN (BUILT_IN_FMA):
5879 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
5880 if (target)
5881 return target;
5882 break;
5883
5884 CASE_FLT_FN (BUILT_IN_ILOGB):
5885 if (! flag_unsafe_math_optimizations)
5886 break;
5887 CASE_FLT_FN (BUILT_IN_ISINF):
5888 CASE_FLT_FN (BUILT_IN_FINITE):
5889 case BUILT_IN_ISFINITE:
5890 case BUILT_IN_ISNORMAL:
5891 target = expand_builtin_interclass_mathfn (exp, target);
5892 if (target)
5893 return target;
5894 break;
5895
5896 CASE_FLT_FN (BUILT_IN_ICEIL):
5897 CASE_FLT_FN (BUILT_IN_LCEIL):
5898 CASE_FLT_FN (BUILT_IN_LLCEIL):
5899 CASE_FLT_FN (BUILT_IN_LFLOOR):
5900 CASE_FLT_FN (BUILT_IN_IFLOOR):
5901 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5902 target = expand_builtin_int_roundingfn (exp, target);
5903 if (target)
5904 return target;
5905 break;
5906
5907 CASE_FLT_FN (BUILT_IN_IRINT):
5908 CASE_FLT_FN (BUILT_IN_LRINT):
5909 CASE_FLT_FN (BUILT_IN_LLRINT):
5910 CASE_FLT_FN (BUILT_IN_IROUND):
5911 CASE_FLT_FN (BUILT_IN_LROUND):
5912 CASE_FLT_FN (BUILT_IN_LLROUND):
5913 target = expand_builtin_int_roundingfn_2 (exp, target);
5914 if (target)
5915 return target;
5916 break;
5917
5918 CASE_FLT_FN (BUILT_IN_POWI):
5919 target = expand_builtin_powi (exp, target);
5920 if (target)
5921 return target;
5922 break;
5923
5924 CASE_FLT_FN (BUILT_IN_ATAN2):
5925 CASE_FLT_FN (BUILT_IN_LDEXP):
5926 CASE_FLT_FN (BUILT_IN_SCALB):
5927 CASE_FLT_FN (BUILT_IN_SCALBN):
5928 CASE_FLT_FN (BUILT_IN_SCALBLN):
5929 if (! flag_unsafe_math_optimizations)
5930 break;
5931
5932 CASE_FLT_FN (BUILT_IN_FMOD):
5933 CASE_FLT_FN (BUILT_IN_REMAINDER):
5934 CASE_FLT_FN (BUILT_IN_DREM):
5935 CASE_FLT_FN (BUILT_IN_POW):
5936 target = expand_builtin_mathfn_2 (exp, target, subtarget);
5937 if (target)
5938 return target;
5939 break;
5940
5941 CASE_FLT_FN (BUILT_IN_CEXPI):
5942 target = expand_builtin_cexpi (exp, target);
5943 gcc_assert (target);
5944 return target;
5945
5946 CASE_FLT_FN (BUILT_IN_SIN):
5947 CASE_FLT_FN (BUILT_IN_COS):
5948 if (! flag_unsafe_math_optimizations)
5949 break;
5950 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5951 if (target)
5952 return target;
5953 break;
5954
5955 CASE_FLT_FN (BUILT_IN_SINCOS):
5956 if (! flag_unsafe_math_optimizations)
5957 break;
5958 target = expand_builtin_sincos (exp);
5959 if (target)
5960 return target;
5961 break;
5962
5963 case BUILT_IN_APPLY_ARGS:
5964 return expand_builtin_apply_args ();
5965
5966 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5967 FUNCTION with a copy of the parameters described by
5968 ARGUMENTS, and ARGSIZE. It returns a block of memory
5969 allocated on the stack into which is stored all the registers
5970 that might possibly be used for returning the result of a
5971 function. ARGUMENTS is the value returned by
5972 __builtin_apply_args. ARGSIZE is the number of bytes of
5973 arguments that must be copied. ??? How should this value be
5974 computed? We'll also need a safe worst case value for varargs
5975 functions. */
5976 case BUILT_IN_APPLY:
5977 if (!validate_arglist (exp, POINTER_TYPE,
5978 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5979 && !validate_arglist (exp, REFERENCE_TYPE,
5980 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5981 return const0_rtx;
5982 else
5983 {
5984 rtx ops[3];
5985
5986 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
5987 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
5988 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
5989
5990 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5991 }
5992
5993 /* __builtin_return (RESULT) causes the function to return the
5994 value described by RESULT. RESULT is address of the block of
5995 memory returned by __builtin_apply. */
5996 case BUILT_IN_RETURN:
5997 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5998 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
5999 return const0_rtx;
6000
6001 case BUILT_IN_SAVEREGS:
6002 return expand_builtin_saveregs ();
6003
6004 case BUILT_IN_VA_ARG_PACK:
6005 /* All valid uses of __builtin_va_arg_pack () are removed during
6006 inlining. */
6007 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6008 return const0_rtx;
6009
6010 case BUILT_IN_VA_ARG_PACK_LEN:
6011 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6012 inlining. */
6013 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6014 return const0_rtx;
6015
6016 /* Return the address of the first anonymous stack arg. */
6017 case BUILT_IN_NEXT_ARG:
6018 if (fold_builtin_next_arg (exp, false))
6019 return const0_rtx;
6020 return expand_builtin_next_arg ();
6021
6022 case BUILT_IN_CLEAR_CACHE:
6023 target = expand_builtin___clear_cache (exp);
6024 if (target)
6025 return target;
6026 break;
6027
6028 case BUILT_IN_CLASSIFY_TYPE:
6029 return expand_builtin_classify_type (exp);
6030
6031 case BUILT_IN_CONSTANT_P:
6032 return const0_rtx;
6033
6034 case BUILT_IN_FRAME_ADDRESS:
6035 case BUILT_IN_RETURN_ADDRESS:
6036 return expand_builtin_frame_address (fndecl, exp);
6037
6038 /* Returns the address of the area where the structure is returned.
6039 0 otherwise. */
6040 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6041 if (call_expr_nargs (exp) != 0
6042 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6043 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6044 return const0_rtx;
6045 else
6046 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6047
6048 case BUILT_IN_ALLOCA:
6049 case BUILT_IN_ALLOCA_WITH_ALIGN:
6050 /* If the allocation stems from the declaration of a variable-sized
6051 object, it cannot accumulate. */
6052 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6053 if (target)
6054 return target;
6055 break;
6056
6057 case BUILT_IN_STACK_SAVE:
6058 return expand_stack_save ();
6059
6060 case BUILT_IN_STACK_RESTORE:
6061 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6062 return const0_rtx;
6063
6064 case BUILT_IN_BSWAP16:
6065 case BUILT_IN_BSWAP32:
6066 case BUILT_IN_BSWAP64:
6067 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6068 if (target)
6069 return target;
6070 break;
6071
6072 CASE_INT_FN (BUILT_IN_FFS):
6073 target = expand_builtin_unop (target_mode, exp, target,
6074 subtarget, ffs_optab);
6075 if (target)
6076 return target;
6077 break;
6078
6079 CASE_INT_FN (BUILT_IN_CLZ):
6080 target = expand_builtin_unop (target_mode, exp, target,
6081 subtarget, clz_optab);
6082 if (target)
6083 return target;
6084 break;
6085
6086 CASE_INT_FN (BUILT_IN_CTZ):
6087 target = expand_builtin_unop (target_mode, exp, target,
6088 subtarget, ctz_optab);
6089 if (target)
6090 return target;
6091 break;
6092
6093 CASE_INT_FN (BUILT_IN_CLRSB):
6094 target = expand_builtin_unop (target_mode, exp, target,
6095 subtarget, clrsb_optab);
6096 if (target)
6097 return target;
6098 break;
6099
6100 CASE_INT_FN (BUILT_IN_POPCOUNT):
6101 target = expand_builtin_unop (target_mode, exp, target,
6102 subtarget, popcount_optab);
6103 if (target)
6104 return target;
6105 break;
6106
6107 CASE_INT_FN (BUILT_IN_PARITY):
6108 target = expand_builtin_unop (target_mode, exp, target,
6109 subtarget, parity_optab);
6110 if (target)
6111 return target;
6112 break;
6113
6114 case BUILT_IN_STRLEN:
6115 target = expand_builtin_strlen (exp, target, target_mode);
6116 if (target)
6117 return target;
6118 break;
6119
6120 case BUILT_IN_STRCPY:
6121 target = expand_builtin_strcpy (exp, target);
6122 if (target)
6123 return target;
6124 break;
6125
6126 case BUILT_IN_STRNCPY:
6127 target = expand_builtin_strncpy (exp, target);
6128 if (target)
6129 return target;
6130 break;
6131
6132 case BUILT_IN_STPCPY:
6133 target = expand_builtin_stpcpy (exp, target, mode);
6134 if (target)
6135 return target;
6136 break;
6137
6138 case BUILT_IN_MEMCPY:
6139 target = expand_builtin_memcpy (exp, target);
6140 if (target)
6141 return target;
6142 break;
6143
6144 case BUILT_IN_MEMPCPY:
6145 target = expand_builtin_mempcpy (exp, target, mode);
6146 if (target)
6147 return target;
6148 break;
6149
6150 case BUILT_IN_MEMSET:
6151 target = expand_builtin_memset (exp, target, mode);
6152 if (target)
6153 return target;
6154 break;
6155
6156 case BUILT_IN_BZERO:
6157 target = expand_builtin_bzero (exp);
6158 if (target)
6159 return target;
6160 break;
6161
6162 case BUILT_IN_STRCMP:
6163 target = expand_builtin_strcmp (exp, target);
6164 if (target)
6165 return target;
6166 break;
6167
6168 case BUILT_IN_STRNCMP:
6169 target = expand_builtin_strncmp (exp, target, mode);
6170 if (target)
6171 return target;
6172 break;
6173
6174 case BUILT_IN_BCMP:
6175 case BUILT_IN_MEMCMP:
6176 target = expand_builtin_memcmp (exp, target, mode);
6177 if (target)
6178 return target;
6179 break;
6180
6181 case BUILT_IN_SETJMP:
6182 /* This should have been lowered to the builtins below. */
6183 gcc_unreachable ();
6184
6185 case BUILT_IN_SETJMP_SETUP:
6186 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6187 and the receiver label. */
6188 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6189 {
6190 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6191 VOIDmode, EXPAND_NORMAL);
6192 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6193 rtx label_r = label_rtx (label);
6194
6195 /* This is copied from the handling of non-local gotos. */
6196 expand_builtin_setjmp_setup (buf_addr, label_r);
6197 nonlocal_goto_handler_labels
6198 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6199 nonlocal_goto_handler_labels);
6200 /* ??? Do not let expand_label treat us as such since we would
6201 not want to be both on the list of non-local labels and on
6202 the list of forced labels. */
6203 FORCED_LABEL (label) = 0;
6204 return const0_rtx;
6205 }
6206 break;
6207
6208 case BUILT_IN_SETJMP_DISPATCHER:
6209 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6210 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6211 {
6212 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6213 rtx label_r = label_rtx (label);
6214
6215 /* Remove the dispatcher label from the list of non-local labels
6216 since the receiver labels have been added to it above. */
6217 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6218 return const0_rtx;
6219 }
6220 break;
6221
6222 case BUILT_IN_SETJMP_RECEIVER:
6223 /* __builtin_setjmp_receiver is passed the receiver label. */
6224 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6225 {
6226 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6227 rtx label_r = label_rtx (label);
6228
6229 expand_builtin_setjmp_receiver (label_r);
6230 return const0_rtx;
6231 }
6232 break;
6233
6234 /* __builtin_longjmp is passed a pointer to an array of five words.
6235 It's similar to the C library longjmp function but works with
6236 __builtin_setjmp above. */
6237 case BUILT_IN_LONGJMP:
6238 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6239 {
6240 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6241 VOIDmode, EXPAND_NORMAL);
6242 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6243
6244 if (value != const1_rtx)
6245 {
6246 error ("%<__builtin_longjmp%> second argument must be 1");
6247 return const0_rtx;
6248 }
6249
6250 expand_builtin_longjmp (buf_addr, value);
6251 return const0_rtx;
6252 }
6253 break;
6254
6255 case BUILT_IN_NONLOCAL_GOTO:
6256 target = expand_builtin_nonlocal_goto (exp);
6257 if (target)
6258 return target;
6259 break;
6260
6261 /* This updates the setjmp buffer that is its argument with the value
6262 of the current stack pointer. */
6263 case BUILT_IN_UPDATE_SETJMP_BUF:
6264 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6265 {
6266 rtx buf_addr
6267 = expand_normal (CALL_EXPR_ARG (exp, 0));
6268
6269 expand_builtin_update_setjmp_buf (buf_addr);
6270 return const0_rtx;
6271 }
6272 break;
6273
6274 case BUILT_IN_TRAP:
6275 expand_builtin_trap ();
6276 return const0_rtx;
6277
6278 case BUILT_IN_UNREACHABLE:
6279 expand_builtin_unreachable ();
6280 return const0_rtx;
6281
6282 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6283 case BUILT_IN_SIGNBITD32:
6284 case BUILT_IN_SIGNBITD64:
6285 case BUILT_IN_SIGNBITD128:
6286 target = expand_builtin_signbit (exp, target);
6287 if (target)
6288 return target;
6289 break;
6290
6291 /* Various hooks for the DWARF 2 __throw routine. */
6292 case BUILT_IN_UNWIND_INIT:
6293 expand_builtin_unwind_init ();
6294 return const0_rtx;
6295 case BUILT_IN_DWARF_CFA:
6296 return virtual_cfa_rtx;
6297 #ifdef DWARF2_UNWIND_INFO
6298 case BUILT_IN_DWARF_SP_COLUMN:
6299 return expand_builtin_dwarf_sp_column ();
6300 case BUILT_IN_INIT_DWARF_REG_SIZES:
6301 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6302 return const0_rtx;
6303 #endif
6304 case BUILT_IN_FROB_RETURN_ADDR:
6305 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6306 case BUILT_IN_EXTRACT_RETURN_ADDR:
6307 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6308 case BUILT_IN_EH_RETURN:
6309 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6310 CALL_EXPR_ARG (exp, 1));
6311 return const0_rtx;
6312 #ifdef EH_RETURN_DATA_REGNO
6313 case BUILT_IN_EH_RETURN_DATA_REGNO:
6314 return expand_builtin_eh_return_data_regno (exp);
6315 #endif
6316 case BUILT_IN_EXTEND_POINTER:
6317 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6318 case BUILT_IN_EH_POINTER:
6319 return expand_builtin_eh_pointer (exp);
6320 case BUILT_IN_EH_FILTER:
6321 return expand_builtin_eh_filter (exp);
6322 case BUILT_IN_EH_COPY_VALUES:
6323 return expand_builtin_eh_copy_values (exp);
6324
6325 case BUILT_IN_VA_START:
6326 return expand_builtin_va_start (exp);
6327 case BUILT_IN_VA_END:
6328 return expand_builtin_va_end (exp);
6329 case BUILT_IN_VA_COPY:
6330 return expand_builtin_va_copy (exp);
6331 case BUILT_IN_EXPECT:
6332 return expand_builtin_expect (exp, target);
6333 case BUILT_IN_ASSUME_ALIGNED:
6334 return expand_builtin_assume_aligned (exp, target);
6335 case BUILT_IN_PREFETCH:
6336 expand_builtin_prefetch (exp);
6337 return const0_rtx;
6338
6339 case BUILT_IN_INIT_TRAMPOLINE:
6340 return expand_builtin_init_trampoline (exp, true);
6341 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6342 return expand_builtin_init_trampoline (exp, false);
6343 case BUILT_IN_ADJUST_TRAMPOLINE:
6344 return expand_builtin_adjust_trampoline (exp);
6345
6346 case BUILT_IN_FORK:
6347 case BUILT_IN_EXECL:
6348 case BUILT_IN_EXECV:
6349 case BUILT_IN_EXECLP:
6350 case BUILT_IN_EXECLE:
6351 case BUILT_IN_EXECVP:
6352 case BUILT_IN_EXECVE:
6353 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6354 if (target)
6355 return target;
6356 break;
6357
6358 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6359 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6360 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6361 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6362 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6363 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6364 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6365 if (target)
6366 return target;
6367 break;
6368
6369 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6370 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6371 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6372 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6373 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6374 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6375 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6376 if (target)
6377 return target;
6378 break;
6379
6380 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6381 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6382 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6383 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6384 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6385 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6386 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6387 if (target)
6388 return target;
6389 break;
6390
6391 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6392 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6393 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6394 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6395 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6396 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6397 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6398 if (target)
6399 return target;
6400 break;
6401
6402 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6403 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6404 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6405 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6406 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6407 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6408 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6409 if (target)
6410 return target;
6411 break;
6412
6413 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6414 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6415 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6416 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6417 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6418 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6419 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6420 if (target)
6421 return target;
6422 break;
6423
6424 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6425 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6426 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6427 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6428 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6429 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6430 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6431 if (target)
6432 return target;
6433 break;
6434
6435 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6436 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6437 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6438 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6439 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6440 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6441 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6442 if (target)
6443 return target;
6444 break;
6445
6446 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6447 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6448 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6449 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6450 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6451 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6452 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6453 if (target)
6454 return target;
6455 break;
6456
6457 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6458 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6459 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6460 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6461 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6462 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6463 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6464 if (target)
6465 return target;
6466 break;
6467
6468 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6469 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6470 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6471 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6472 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6473 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6474 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6475 if (target)
6476 return target;
6477 break;
6478
6479 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6480 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6481 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6482 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6483 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6484 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6485 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6486 if (target)
6487 return target;
6488 break;
6489
6490 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6491 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6492 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6493 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6494 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6495 if (mode == VOIDmode)
6496 mode = TYPE_MODE (boolean_type_node);
6497 if (!target || !register_operand (target, mode))
6498 target = gen_reg_rtx (mode);
6499
6500 mode = get_builtin_sync_mode
6501 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6502 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6503 if (target)
6504 return target;
6505 break;
6506
6507 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6508 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6509 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6510 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6511 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6512 mode = get_builtin_sync_mode
6513 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6514 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6515 if (target)
6516 return target;
6517 break;
6518
6519 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6520 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6521 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6522 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6523 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6524 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6525 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6526 if (target)
6527 return target;
6528 break;
6529
6530 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6531 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6532 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6533 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6534 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6535 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6536 expand_builtin_sync_lock_release (mode, exp);
6537 return const0_rtx;
6538
6539 case BUILT_IN_SYNC_SYNCHRONIZE:
6540 expand_builtin_sync_synchronize ();
6541 return const0_rtx;
6542
6543 case BUILT_IN_ATOMIC_EXCHANGE_1:
6544 case BUILT_IN_ATOMIC_EXCHANGE_2:
6545 case BUILT_IN_ATOMIC_EXCHANGE_4:
6546 case BUILT_IN_ATOMIC_EXCHANGE_8:
6547 case BUILT_IN_ATOMIC_EXCHANGE_16:
6548 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6549 target = expand_builtin_atomic_exchange (mode, exp, target);
6550 if (target)
6551 return target;
6552 break;
6553
6554 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6555 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6556 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6557 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6558 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6559 {
6560 unsigned int nargs, z;
6561 vec<tree, va_gc> *vec;
6562
6563 mode =
6564 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6565 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6566 if (target)
6567 return target;
6568
6569 /* If this is turned into an external library call, the weak parameter
6570 must be dropped to match the expected parameter list. */
6571 nargs = call_expr_nargs (exp);
6572 vec_alloc (vec, nargs - 1);
6573 for (z = 0; z < 3; z++)
6574 vec->quick_push (CALL_EXPR_ARG (exp, z));
6575 /* Skip the boolean weak parameter. */
6576 for (z = 4; z < 6; z++)
6577 vec->quick_push (CALL_EXPR_ARG (exp, z));
6578 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6579 break;
6580 }
6581
6582 case BUILT_IN_ATOMIC_LOAD_1:
6583 case BUILT_IN_ATOMIC_LOAD_2:
6584 case BUILT_IN_ATOMIC_LOAD_4:
6585 case BUILT_IN_ATOMIC_LOAD_8:
6586 case BUILT_IN_ATOMIC_LOAD_16:
6587 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6588 target = expand_builtin_atomic_load (mode, exp, target);
6589 if (target)
6590 return target;
6591 break;
6592
6593 case BUILT_IN_ATOMIC_STORE_1:
6594 case BUILT_IN_ATOMIC_STORE_2:
6595 case BUILT_IN_ATOMIC_STORE_4:
6596 case BUILT_IN_ATOMIC_STORE_8:
6597 case BUILT_IN_ATOMIC_STORE_16:
6598 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6599 target = expand_builtin_atomic_store (mode, exp);
6600 if (target)
6601 return const0_rtx;
6602 break;
6603
6604 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6605 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6606 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6607 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6608 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6609 {
6610 enum built_in_function lib;
6611 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6612 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6613 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6614 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6615 ignore, lib);
6616 if (target)
6617 return target;
6618 break;
6619 }
6620 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6621 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6622 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6623 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6624 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6625 {
6626 enum built_in_function lib;
6627 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6628 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6629 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6630 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6631 ignore, lib);
6632 if (target)
6633 return target;
6634 break;
6635 }
6636 case BUILT_IN_ATOMIC_AND_FETCH_1:
6637 case BUILT_IN_ATOMIC_AND_FETCH_2:
6638 case BUILT_IN_ATOMIC_AND_FETCH_4:
6639 case BUILT_IN_ATOMIC_AND_FETCH_8:
6640 case BUILT_IN_ATOMIC_AND_FETCH_16:
6641 {
6642 enum built_in_function lib;
6643 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6644 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6645 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6646 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6647 ignore, lib);
6648 if (target)
6649 return target;
6650 break;
6651 }
6652 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6653 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6654 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6655 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6656 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6657 {
6658 enum built_in_function lib;
6659 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6660 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6661 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6662 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6663 ignore, lib);
6664 if (target)
6665 return target;
6666 break;
6667 }
6668 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6669 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6670 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6671 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6672 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6673 {
6674 enum built_in_function lib;
6675 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6676 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6677 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6678 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6679 ignore, lib);
6680 if (target)
6681 return target;
6682 break;
6683 }
6684 case BUILT_IN_ATOMIC_OR_FETCH_1:
6685 case BUILT_IN_ATOMIC_OR_FETCH_2:
6686 case BUILT_IN_ATOMIC_OR_FETCH_4:
6687 case BUILT_IN_ATOMIC_OR_FETCH_8:
6688 case BUILT_IN_ATOMIC_OR_FETCH_16:
6689 {
6690 enum built_in_function lib;
6691 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6692 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6693 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6694 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6695 ignore, lib);
6696 if (target)
6697 return target;
6698 break;
6699 }
6700 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6701 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6702 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6703 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6704 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6705 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6706 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6707 ignore, BUILT_IN_NONE);
6708 if (target)
6709 return target;
6710 break;
6711
6712 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6713 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6714 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6715 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6716 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6717 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6718 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6719 ignore, BUILT_IN_NONE);
6720 if (target)
6721 return target;
6722 break;
6723
6724 case BUILT_IN_ATOMIC_FETCH_AND_1:
6725 case BUILT_IN_ATOMIC_FETCH_AND_2:
6726 case BUILT_IN_ATOMIC_FETCH_AND_4:
6727 case BUILT_IN_ATOMIC_FETCH_AND_8:
6728 case BUILT_IN_ATOMIC_FETCH_AND_16:
6729 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6730 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6731 ignore, BUILT_IN_NONE);
6732 if (target)
6733 return target;
6734 break;
6735
6736 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6737 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6738 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6739 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6740 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6741 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6742 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6743 ignore, BUILT_IN_NONE);
6744 if (target)
6745 return target;
6746 break;
6747
6748 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6749 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6750 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6751 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6752 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6753 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6754 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6755 ignore, BUILT_IN_NONE);
6756 if (target)
6757 return target;
6758 break;
6759
6760 case BUILT_IN_ATOMIC_FETCH_OR_1:
6761 case BUILT_IN_ATOMIC_FETCH_OR_2:
6762 case BUILT_IN_ATOMIC_FETCH_OR_4:
6763 case BUILT_IN_ATOMIC_FETCH_OR_8:
6764 case BUILT_IN_ATOMIC_FETCH_OR_16:
6765 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6766 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6767 ignore, BUILT_IN_NONE);
6768 if (target)
6769 return target;
6770 break;
6771
6772 case BUILT_IN_ATOMIC_TEST_AND_SET:
6773 return expand_builtin_atomic_test_and_set (exp, target);
6774
6775 case BUILT_IN_ATOMIC_CLEAR:
6776 return expand_builtin_atomic_clear (exp);
6777
6778 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6779 return expand_builtin_atomic_always_lock_free (exp);
6780
6781 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6782 target = expand_builtin_atomic_is_lock_free (exp);
6783 if (target)
6784 return target;
6785 break;
6786
6787 case BUILT_IN_ATOMIC_THREAD_FENCE:
6788 expand_builtin_atomic_thread_fence (exp);
6789 return const0_rtx;
6790
6791 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6792 expand_builtin_atomic_signal_fence (exp);
6793 return const0_rtx;
6794
6795 case BUILT_IN_OBJECT_SIZE:
6796 return expand_builtin_object_size (exp);
6797
6798 case BUILT_IN_MEMCPY_CHK:
6799 case BUILT_IN_MEMPCPY_CHK:
6800 case BUILT_IN_MEMMOVE_CHK:
6801 case BUILT_IN_MEMSET_CHK:
6802 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6803 if (target)
6804 return target;
6805 break;
6806
6807 case BUILT_IN_STRCPY_CHK:
6808 case BUILT_IN_STPCPY_CHK:
6809 case BUILT_IN_STRNCPY_CHK:
6810 case BUILT_IN_STPNCPY_CHK:
6811 case BUILT_IN_STRCAT_CHK:
6812 case BUILT_IN_STRNCAT_CHK:
6813 case BUILT_IN_SNPRINTF_CHK:
6814 case BUILT_IN_VSNPRINTF_CHK:
6815 maybe_emit_chk_warning (exp, fcode);
6816 break;
6817
6818 case BUILT_IN_SPRINTF_CHK:
6819 case BUILT_IN_VSPRINTF_CHK:
6820 maybe_emit_sprintf_chk_warning (exp, fcode);
6821 break;
6822
6823 case BUILT_IN_FREE:
6824 if (warn_free_nonheap_object)
6825 maybe_emit_free_warning (exp);
6826 break;
6827
6828 case BUILT_IN_THREAD_POINTER:
6829 return expand_builtin_thread_pointer (exp, target);
6830
6831 case BUILT_IN_SET_THREAD_POINTER:
6832 expand_builtin_set_thread_pointer (exp);
6833 return const0_rtx;
6834
6835 case BUILT_IN_CILK_DETACH:
6836 expand_builtin_cilk_detach (exp);
6837 return const0_rtx;
6838
6839 case BUILT_IN_CILK_POP_FRAME:
6840 expand_builtin_cilk_pop_frame (exp);
6841 return const0_rtx;
6842
6843 default: /* Just do a library call if this is an unknown builtin. */
6844 break;
6845 }
6846
6847 /* The switch statement above can drop through to cause the function
6848 to be called normally. */
6849 return expand_call (exp, target, ignore);
6850 }
6851
6852 /* Determine whether a tree node represents a call to a built-in
6853 function. If the tree T is a call to a built-in function with
6854 the right number of arguments of the appropriate types, return
6855 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6856 Otherwise the return value is END_BUILTINS. */
6857
6858 enum built_in_function
6859 builtin_mathfn_code (const_tree t)
6860 {
6861 const_tree fndecl, arg, parmlist;
6862 const_tree argtype, parmtype;
6863 const_call_expr_arg_iterator iter;
6864
6865 if (TREE_CODE (t) != CALL_EXPR
6866 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6867 return END_BUILTINS;
6868
6869 fndecl = get_callee_fndecl (t);
6870 if (fndecl == NULL_TREE
6871 || TREE_CODE (fndecl) != FUNCTION_DECL
6872 || ! DECL_BUILT_IN (fndecl)
6873 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6874 return END_BUILTINS;
6875
6876 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6877 init_const_call_expr_arg_iterator (t, &iter);
6878 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6879 {
6880 /* If a function doesn't take a variable number of arguments,
6881 the last element in the list will have type `void'. */
6882 parmtype = TREE_VALUE (parmlist);
6883 if (VOID_TYPE_P (parmtype))
6884 {
6885 if (more_const_call_expr_args_p (&iter))
6886 return END_BUILTINS;
6887 return DECL_FUNCTION_CODE (fndecl);
6888 }
6889
6890 if (! more_const_call_expr_args_p (&iter))
6891 return END_BUILTINS;
6892
6893 arg = next_const_call_expr_arg (&iter);
6894 argtype = TREE_TYPE (arg);
6895
6896 if (SCALAR_FLOAT_TYPE_P (parmtype))
6897 {
6898 if (! SCALAR_FLOAT_TYPE_P (argtype))
6899 return END_BUILTINS;
6900 }
6901 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6902 {
6903 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6904 return END_BUILTINS;
6905 }
6906 else if (POINTER_TYPE_P (parmtype))
6907 {
6908 if (! POINTER_TYPE_P (argtype))
6909 return END_BUILTINS;
6910 }
6911 else if (INTEGRAL_TYPE_P (parmtype))
6912 {
6913 if (! INTEGRAL_TYPE_P (argtype))
6914 return END_BUILTINS;
6915 }
6916 else
6917 return END_BUILTINS;
6918 }
6919
6920 /* Variable-length argument list. */
6921 return DECL_FUNCTION_CODE (fndecl);
6922 }
6923
6924 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6925 evaluate to a constant. */
6926
6927 static tree
6928 fold_builtin_constant_p (tree arg)
6929 {
6930 /* We return 1 for a numeric type that's known to be a constant
6931 value at compile-time or for an aggregate type that's a
6932 literal constant. */
6933 STRIP_NOPS (arg);
6934
6935 /* If we know this is a constant, return the constant one. */
6936 if (CONSTANT_CLASS_P (arg)
6937 || (TREE_CODE (arg) == CONSTRUCTOR
6938 && TREE_CONSTANT (arg)))
6939 return integer_one_node;
6940 if (TREE_CODE (arg) == ADDR_EXPR)
6941 {
6942 tree op = TREE_OPERAND (arg, 0);
6943 if (TREE_CODE (op) == STRING_CST
6944 || (TREE_CODE (op) == ARRAY_REF
6945 && integer_zerop (TREE_OPERAND (op, 1))
6946 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6947 return integer_one_node;
6948 }
6949
6950 /* If this expression has side effects, show we don't know it to be a
6951 constant. Likewise if it's a pointer or aggregate type, since in
6952 those cases we only want literals; those are only optimized
6953 when generating RTL, not later.
6954 And finally, if we are compiling an initializer, not code, we
6955 need to return a definite result now; there's not going to be any
6956 more optimization done. */
6957 if (TREE_SIDE_EFFECTS (arg)
6958 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
6959 || POINTER_TYPE_P (TREE_TYPE (arg))
6960 || cfun == 0
6961 || folding_initializer
6962 || force_folding_builtin_constant_p)
6963 return integer_zero_node;
6964
6965 return NULL_TREE;
6966 }
6967
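/* Illustrative examples (an editor's sketch, not part of the original
   source): given the rules above,

     __builtin_constant_p (42)     folds to 1  (CONSTANT_CLASS_P)
     __builtin_constant_p ("abc")  folds to 1  (address of a STRING_CST)
     __builtin_constant_p (x++)    folds to 0  (side effects)

   while __builtin_constant_p (x) for a plain scalar variable yields
   NULL_TREE here and is retried after later optimizations, unless we
   are compiling an initializer or force_folding_builtin_constant_p is
   set, in which case it folds to 0 immediately.  */
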
6968 /* Create builtin_expect with PRED and EXPECTED as its arguments and
6969 return it as a truthvalue. */
6970
6971 static tree
6972 build_builtin_expect_predicate (location_t loc, tree pred, tree expected)
6973 {
6974 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
6975
6976 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
6977 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
6978 ret_type = TREE_TYPE (TREE_TYPE (fn));
6979 pred_type = TREE_VALUE (arg_types);
6980 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
6981
6982 pred = fold_convert_loc (loc, pred_type, pred);
6983 expected = fold_convert_loc (loc, expected_type, expected);
6984 call_expr = build_call_expr_loc (loc, fn, 2, pred, expected);
6985
6986 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
6987 build_int_cst (ret_type, 0));
6988 }
6989
6990 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
6991 NULL_TREE if no simplification is possible. */
6992
6993 static tree
6994 fold_builtin_expect (location_t loc, tree arg0, tree arg1)
6995 {
6996 tree inner, fndecl, inner_arg0;
6997 enum tree_code code;
6998
6999 /* Distribute the expected value over short-circuiting operators.
7000 See through the cast from truthvalue_type_node to long. */
7001 inner_arg0 = arg0;
7002 while (TREE_CODE (inner_arg0) == NOP_EXPR
7003 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7004 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7005 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7006
7007 /* If this is a builtin_expect within a builtin_expect, keep the
7008 inner one. See through a comparison against a constant. It
7009 might have been added to create a truthvalue. */
7010 inner = inner_arg0;
7011
7012 if (COMPARISON_CLASS_P (inner)
7013 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7014 inner = TREE_OPERAND (inner, 0);
7015
7016 if (TREE_CODE (inner) == CALL_EXPR
7017 && (fndecl = get_callee_fndecl (inner))
7018 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7019 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7020 return arg0;
7021
7022 inner = inner_arg0;
7023 code = TREE_CODE (inner);
7024 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7025 {
7026 tree op0 = TREE_OPERAND (inner, 0);
7027 tree op1 = TREE_OPERAND (inner, 1);
7028
7029 op0 = build_builtin_expect_predicate (loc, op0, arg1);
7030 op1 = build_builtin_expect_predicate (loc, op1, arg1);
7031 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7032
7033 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7034 }
7035
7036 /* If the argument isn't invariant then there's nothing else we can do. */
7037 if (!TREE_CONSTANT (inner_arg0))
7038 return NULL_TREE;
7039
7040 /* If we expect that a comparison against the argument will fold to
7041 a constant, return the constant. In practice, this means a true
7042 constant or the address of a non-weak symbol. */
7043 inner = inner_arg0;
7044 STRIP_NOPS (inner);
7045 if (TREE_CODE (inner) == ADDR_EXPR)
7046 {
7047 do
7048 {
7049 inner = TREE_OPERAND (inner, 0);
7050 }
7051 while (TREE_CODE (inner) == COMPONENT_REF
7052 || TREE_CODE (inner) == ARRAY_REF);
7053 if ((TREE_CODE (inner) == VAR_DECL
7054 || TREE_CODE (inner) == FUNCTION_DECL)
7055 && DECL_WEAK (inner))
7056 return NULL_TREE;
7057 }
7058
7059 /* Otherwise, ARG0 already has the proper type for the return value. */
7060 return arg0;
7061 }
7062
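/* Illustrative example (an editor's sketch, not part of the original
   source): by the distribution above, a source-level

     __builtin_expect (a && b, 1)

   becomes, roughly,

     (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)

   so the branch-prediction hint survives the short-circuit lowering
   of each operand.  */
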
7063 /* Fold a call to __builtin_classify_type with argument ARG. */
7064
7065 static tree
7066 fold_builtin_classify_type (tree arg)
7067 {
7068 if (arg == 0)
7069 return build_int_cst (integer_type_node, no_type_class);
7070
7071 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7072 }
7073
7074 /* Fold a call to __builtin_strlen with argument ARG. */
7075
7076 static tree
7077 fold_builtin_strlen (location_t loc, tree type, tree arg)
7078 {
7079 if (!validate_arg (arg, POINTER_TYPE))
7080 return NULL_TREE;
7081 else
7082 {
7083 tree len = c_strlen (arg, 0);
7084
7085 if (len)
7086 return fold_convert_loc (loc, type, len);
7087
7088 return NULL_TREE;
7089 }
7090 }
7091
7092 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7093
7094 static tree
7095 fold_builtin_inf (location_t loc, tree type, int warn)
7096 {
7097 REAL_VALUE_TYPE real;
7098
7099 /* __builtin_inff is intended to be usable to define INFINITY on all
7100 targets. If an infinity is not available, INFINITY expands "to a
7101 positive constant of type float that overflows at translation
7102 time", footnote "In this case, using INFINITY will violate the
7103 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7104 Thus we pedwarn to ensure this constraint violation is
7105 diagnosed. */
7106 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7107 pedwarn (loc, 0, "target format does not support infinity");
7108
7109 real_inf (&real);
7110 return build_real (type, real);
7111 }
7112
7113 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7114
7115 static tree
7116 fold_builtin_nan (tree arg, tree type, int quiet)
7117 {
7118 REAL_VALUE_TYPE real;
7119 const char *str;
7120
7121 if (!validate_arg (arg, POINTER_TYPE))
7122 return NULL_TREE;
7123 str = c_getstr (arg);
7124 if (!str)
7125 return NULL_TREE;
7126
7127 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7128 return NULL_TREE;
7129
7130 return build_real (type, real);
7131 }
7132
7133 /* Return true if the floating point expression T has an integer value.
7134 We also allow +Inf, -Inf and NaN to be considered integer values. */
7135
7136 static bool
7137 integer_valued_real_p (tree t)
7138 {
7139 switch (TREE_CODE (t))
7140 {
7141 case FLOAT_EXPR:
7142 return true;
7143
7144 case ABS_EXPR:
7145 case SAVE_EXPR:
7146 return integer_valued_real_p (TREE_OPERAND (t, 0));
7147
7148 case COMPOUND_EXPR:
7149 case MODIFY_EXPR:
7150 case BIND_EXPR:
7151 return integer_valued_real_p (TREE_OPERAND (t, 1));
7152
7153 case PLUS_EXPR:
7154 case MINUS_EXPR:
7155 case MULT_EXPR:
7156 case MIN_EXPR:
7157 case MAX_EXPR:
7158 return integer_valued_real_p (TREE_OPERAND (t, 0))
7159 && integer_valued_real_p (TREE_OPERAND (t, 1));
7160
7161 case COND_EXPR:
7162 return integer_valued_real_p (TREE_OPERAND (t, 1))
7163 && integer_valued_real_p (TREE_OPERAND (t, 2));
7164
7165 case REAL_CST:
7166 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7167
7168 case NOP_EXPR:
7169 {
7170 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7171 if (TREE_CODE (type) == INTEGER_TYPE)
7172 return true;
7173 if (TREE_CODE (type) == REAL_TYPE)
7174 return integer_valued_real_p (TREE_OPERAND (t, 0));
7175 break;
7176 }
7177
7178 case CALL_EXPR:
7179 switch (builtin_mathfn_code (t))
7180 {
7181 CASE_FLT_FN (BUILT_IN_CEIL):
7182 CASE_FLT_FN (BUILT_IN_FLOOR):
7183 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7184 CASE_FLT_FN (BUILT_IN_RINT):
7185 CASE_FLT_FN (BUILT_IN_ROUND):
7186 CASE_FLT_FN (BUILT_IN_TRUNC):
7187 return true;
7188
7189 CASE_FLT_FN (BUILT_IN_FMIN):
7190 CASE_FLT_FN (BUILT_IN_FMAX):
7191 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7192 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7193
7194 default:
7195 break;
7196 }
7197 break;
7198
7199 default:
7200 break;
7201 }
7202 return false;
7203 }
7204
7205 /* FNDECL is assumed to be a builtin where truncation can be propagated
7206 across (for instance floor((double)f) == (double)floorf (f)).
7207 Do the transformation for a call with argument ARG. */
7208
7209 static tree
7210 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
7211 {
7212 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7213
7214 if (!validate_arg (arg, REAL_TYPE))
7215 return NULL_TREE;
7216
7217 /* Integer rounding functions are idempotent. */
7218 if (fcode == builtin_mathfn_code (arg))
7219 return arg;
7220
7221 /* If argument is already integer valued, and we don't need to worry
7222 about setting errno, there's no need to perform rounding. */
7223 if (! flag_errno_math && integer_valued_real_p (arg))
7224 return arg;
7225
7226 if (optimize)
7227 {
7228 tree arg0 = strip_float_extensions (arg);
7229 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7230 tree newtype = TREE_TYPE (arg0);
7231 tree decl;
7232
7233 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7234 && (decl = mathfn_built_in (newtype, fcode)))
7235 return fold_convert_loc (loc, ftype,
7236 build_call_expr_loc (loc, decl, 1,
7237 fold_convert_loc (loc,
7238 newtype,
7239 arg0)));
7240 }
7241 return NULL_TREE;
7242 }
7243
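/* Illustrative example (an editor's sketch, not part of the original
   source): for a float F, the narrowing above rewrites

     floor ((double) F)   as   (double) floorf (F)

   when optimizing and a builtin exists for the narrower type; and
   floor (floor (x)) folds to floor (x) by the idempotence check.  */
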
7244 /* FNDECL is assumed to be a builtin which can narrow the FP type of
7245 the argument, for instance lround((double)f) -> lroundf (f).
7246 Do the transformation for a call with argument ARG. */
7247
7248 static tree
7249 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
7250 {
7251 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7252
7253 if (!validate_arg (arg, REAL_TYPE))
7254 return NULL_TREE;
7255
7256 /* If argument is already integer valued, and we don't need to worry
7257 about setting errno, there's no need to perform rounding. */
7258 if (! flag_errno_math && integer_valued_real_p (arg))
7259 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7260 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7261
7262 if (optimize)
7263 {
7264 tree ftype = TREE_TYPE (arg);
7265 tree arg0 = strip_float_extensions (arg);
7266 tree newtype = TREE_TYPE (arg0);
7267 tree decl;
7268
7269 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7270 && (decl = mathfn_built_in (newtype, fcode)))
7271 return build_call_expr_loc (loc, decl, 1,
7272 fold_convert_loc (loc, newtype, arg0));
7273 }
7274
7275 /* Canonicalize iround (x) to lround (x) on ILP32 targets where
7276 sizeof (int) == sizeof (long). */
7277 if (TYPE_PRECISION (integer_type_node)
7278 == TYPE_PRECISION (long_integer_type_node))
7279 {
7280 tree newfn = NULL_TREE;
7281 switch (fcode)
7282 {
7283 CASE_FLT_FN (BUILT_IN_ICEIL):
7284 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7285 break;
7286
7287 CASE_FLT_FN (BUILT_IN_IFLOOR):
7288 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7289 break;
7290
7291 CASE_FLT_FN (BUILT_IN_IROUND):
7292 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7293 break;
7294
7295 CASE_FLT_FN (BUILT_IN_IRINT):
7296 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7297 break;
7298
7299 default:
7300 break;
7301 }
7302
7303 if (newfn)
7304 {
7305 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7306 return fold_convert_loc (loc,
7307 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7308 }
7309 }
7310
7311 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7312 sizeof (long long) == sizeof (long). */
7313 if (TYPE_PRECISION (long_long_integer_type_node)
7314 == TYPE_PRECISION (long_integer_type_node))
7315 {
7316 tree newfn = NULL_TREE;
7317 switch (fcode)
7318 {
7319 CASE_FLT_FN (BUILT_IN_LLCEIL):
7320 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7321 break;
7322
7323 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7324 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7325 break;
7326
7327 CASE_FLT_FN (BUILT_IN_LLROUND):
7328 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7329 break;
7330
7331 CASE_FLT_FN (BUILT_IN_LLRINT):
7332 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7333 break;
7334
7335 default:
7336 break;
7337 }
7338
7339 if (newfn)
7340 {
7341 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7342 return fold_convert_loc (loc,
7343 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7344 }
7345 }
7346
7347 return NULL_TREE;
7348 }
7349
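/* Illustrative examples (an editor's sketch, not part of the original
   source): the canonicalizations above yield

     llround (x)  ->  lround (x)   on LP64 targets, and
     iround (x)   ->  lround (x)   on ILP32 targets,

   with the result cast back to the original return type; a provably
   integer-valued argument becomes a plain FIX_TRUNC_EXPR conversion
   when errno handling is off.  */
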
7350 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7351 return type. Return NULL_TREE if no simplification can be made. */
7352
7353 static tree
7354 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
7355 {
7356 tree res;
7357
7358 if (!validate_arg (arg, COMPLEX_TYPE)
7359 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7360 return NULL_TREE;
7361
7362 /* Calculate the result when the argument is a constant. */
7363 if (TREE_CODE (arg) == COMPLEX_CST
7364 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7365 type, mpfr_hypot)))
7366 return res;
7367
7368 if (TREE_CODE (arg) == COMPLEX_EXPR)
7369 {
7370 tree real = TREE_OPERAND (arg, 0);
7371 tree imag = TREE_OPERAND (arg, 1);
7372
7373 /* If either part is zero, cabs is fabs of the other. */
7374 if (real_zerop (real))
7375 return fold_build1_loc (loc, ABS_EXPR, type, imag);
7376 if (real_zerop (imag))
7377 return fold_build1_loc (loc, ABS_EXPR, type, real);
7378
7379 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7380 if (flag_unsafe_math_optimizations
7381 && operand_equal_p (real, imag, OEP_PURE_SAME))
7382 {
7383 const REAL_VALUE_TYPE sqrt2_trunc
7384 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7385 STRIP_NOPS (real);
7386 return fold_build2_loc (loc, MULT_EXPR, type,
7387 fold_build1_loc (loc, ABS_EXPR, type, real),
7388 build_real (type, sqrt2_trunc));
7389 }
7390 }
7391
7392 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7393 if (TREE_CODE (arg) == NEGATE_EXPR
7394 || TREE_CODE (arg) == CONJ_EXPR)
7395 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
7396
7397 /* Don't do this when optimizing for size. */
7398 if (flag_unsafe_math_optimizations
7399 && optimize && optimize_function_for_speed_p (cfun))
7400 {
7401 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7402
7403 if (sqrtfn != NULL_TREE)
7404 {
7405 tree rpart, ipart, result;
7406
7407 arg = builtin_save_expr (arg);
7408
7409 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
7410 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
7411
7412 rpart = builtin_save_expr (rpart);
7413 ipart = builtin_save_expr (ipart);
7414
7415 result = fold_build2_loc (loc, PLUS_EXPR, type,
7416 fold_build2_loc (loc, MULT_EXPR, type,
7417 rpart, rpart),
7418 fold_build2_loc (loc, MULT_EXPR, type,
7419 ipart, ipart));
7420
7421 return build_call_expr_loc (loc, sqrtfn, 1, result);
7422 }
7423 }
7424
7425 return NULL_TREE;
7426 }
7427
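/* Illustrative examples (an editor's sketch, not part of the original
   source): the folds above give

     cabs (x + 0i)  ->  fabs (x)
     cabs (-z)      ->  cabs (z)
     cabs (x + xi)  ->  fabs (x) * sqrt (2)   (unsafe math only)

   and, with unsafe math when optimizing for speed, the general case
   expands to sqrt (r*r + i*i) on the saved real and imaginary parts.  */
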
7428 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7429 complex tree type of the result. If NEG is true, the imaginary
7430 zero is negative. */
7431
7432 static tree
7433 build_complex_cproj (tree type, bool neg)
7434 {
7435 REAL_VALUE_TYPE rinf, rzero = dconst0;
7436
7437 real_inf (&rinf);
7438 rzero.sign = neg;
7439 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7440 build_real (TREE_TYPE (type), rzero));
7441 }
7442
7443 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7444 return type. Return NULL_TREE if no simplification can be made. */
7445
7446 static tree
7447 fold_builtin_cproj (location_t loc, tree arg, tree type)
7448 {
7449 if (!validate_arg (arg, COMPLEX_TYPE)
7450 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7451 return NULL_TREE;
7452
7453 /* If there are no infinities, return arg. */
7454 if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type))))
7455 return non_lvalue_loc (loc, arg);
7456
7457 /* Calculate the result when the argument is a constant. */
7458 if (TREE_CODE (arg) == COMPLEX_CST)
7459 {
7460 const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
7461 const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
7462
7463 if (real_isinf (real) || real_isinf (imag))
7464 return build_complex_cproj (type, imag->sign);
7465 else
7466 return arg;
7467 }
7468 else if (TREE_CODE (arg) == COMPLEX_EXPR)
7469 {
7470 tree real = TREE_OPERAND (arg, 0);
7471 tree imag = TREE_OPERAND (arg, 1);
7472
7473 STRIP_NOPS (real);
7474 STRIP_NOPS (imag);
7475
7476 /* If the real part is inf and the imag part is known to be
7477 nonnegative, return (inf + 0i). Remember side-effects are
7478 possible in the imag part. */
7479 if (TREE_CODE (real) == REAL_CST
7480 && real_isinf (TREE_REAL_CST_PTR (real))
7481 && tree_expr_nonnegative_p (imag))
7482 return omit_one_operand_loc (loc, type,
7483 build_complex_cproj (type, false),
7484 arg);
7485
7486 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
7487 Remember side-effects are possible in the real part. */
7488 if (TREE_CODE (imag) == REAL_CST
7489 && real_isinf (TREE_REAL_CST_PTR (imag)))
7490 return
7491 omit_one_operand_loc (loc, type,
7492 build_complex_cproj (type, TREE_REAL_CST_PTR
7493 (imag)->sign), arg);
7494 }
7495
7496 return NULL_TREE;
7497 }
7498
7499 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7500 Return NULL_TREE if no simplification can be made. */
7501
7502 static tree
7503 fold_builtin_sqrt (location_t loc, tree arg, tree type)
7504 {
7505
7506 enum built_in_function fcode;
7507 tree res;
7508
7509 if (!validate_arg (arg, REAL_TYPE))
7510 return NULL_TREE;
7511
7512 /* Calculate the result when the argument is a constant. */
7513 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7514 return res;
7515
7516 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7517 fcode = builtin_mathfn_code (arg);
7518 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7519 {
7520 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7521 arg = fold_build2_loc (loc, MULT_EXPR, type,
7522 CALL_EXPR_ARG (arg, 0),
7523 build_real (type, dconsthalf));
7524 return build_call_expr_loc (loc, expfn, 1, arg);
7525 }
7526
7527 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7528 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7529 {
7530 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7531
7532 if (powfn)
7533 {
7534 tree arg0 = CALL_EXPR_ARG (arg, 0);
7535 tree tree_root;
7536 /* The inner root was either sqrt or cbrt. */
7537 /* This was a conditional expression but it triggered a bug
7538 in Sun C 5.5. */
7539 REAL_VALUE_TYPE dconstroot;
7540 if (BUILTIN_SQRT_P (fcode))
7541 dconstroot = dconsthalf;
7542 else
7543 dconstroot = dconst_third ();
7544
7545 /* Adjust for the outer root. */
7546 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7547 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7548 tree_root = build_real (type, dconstroot);
7549 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7550 }
7551 }
7552
7553 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7554 if (flag_unsafe_math_optimizations
7555 && (fcode == BUILT_IN_POW
7556 || fcode == BUILT_IN_POWF
7557 || fcode == BUILT_IN_POWL))
7558 {
7559 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7560 tree arg0 = CALL_EXPR_ARG (arg, 0);
7561 tree arg1 = CALL_EXPR_ARG (arg, 1);
7562 tree narg1;
7563 if (!tree_expr_nonnegative_p (arg0))
7564 arg0 = build1 (ABS_EXPR, type, arg0);
7565 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7566 build_real (type, dconsthalf));
7567 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7568 }
7569
7570 return NULL_TREE;
7571 }
7572
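/* Illustrative examples (an editor's sketch, not part of the original
   source): under -funsafe-math-optimizations the rules above perform

     sqrt (exp (x))     ->  exp (x * 0.5)
     sqrt (sqrt (x))    ->  pow (x, 0.25)
     sqrt (cbrt (x))    ->  pow (x, 1.0/6.0)
     sqrt (pow (x, y))  ->  pow (fabs (x), y * 0.5)

   where the fabs is dropped if x is known to be nonnegative.  */
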
7573 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7574 Return NULL_TREE if no simplification can be made. */
7575
7576 static tree
7577 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7578 {
7579 const enum built_in_function fcode = builtin_mathfn_code (arg);
7580 tree res;
7581
7582 if (!validate_arg (arg, REAL_TYPE))
7583 return NULL_TREE;
7584
7585 /* Calculate the result when the argument is a constant. */
7586 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7587 return res;
7588
7589 if (flag_unsafe_math_optimizations)
7590 {
7591 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7592 if (BUILTIN_EXPONENT_P (fcode))
7593 {
7594 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7595 const REAL_VALUE_TYPE third_trunc =
7596 real_value_truncate (TYPE_MODE (type), dconst_third ());
7597 arg = fold_build2_loc (loc, MULT_EXPR, type,
7598 CALL_EXPR_ARG (arg, 0),
7599 build_real (type, third_trunc));
7600 return build_call_expr_loc (loc, expfn, 1, arg);
7601 }
7602
7603 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7604 if (BUILTIN_SQRT_P (fcode))
7605 {
7606 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7607
7608 if (powfn)
7609 {
7610 tree arg0 = CALL_EXPR_ARG (arg, 0);
7611 tree tree_root;
7612 REAL_VALUE_TYPE dconstroot = dconst_third ();
7613
7614 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7615 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7616 tree_root = build_real (type, dconstroot);
7617 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7618 }
7619 }
7620
7621 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7622 if (BUILTIN_CBRT_P (fcode))
7623 {
7624 tree arg0 = CALL_EXPR_ARG (arg, 0);
7625 if (tree_expr_nonnegative_p (arg0))
7626 {
7627 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7628
7629 if (powfn)
7630 {
7631 tree tree_root;
7632 REAL_VALUE_TYPE dconstroot;
7633
7634 real_arithmetic (&dconstroot, MULT_EXPR,
7635 dconst_third_ptr (), dconst_third_ptr ());
7636 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7637 tree_root = build_real (type, dconstroot);
7638 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7639 }
7640 }
7641 }
7642
7643 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7644 if (fcode == BUILT_IN_POW
7645 || fcode == BUILT_IN_POWF
7646 || fcode == BUILT_IN_POWL)
7647 {
7648 tree arg00 = CALL_EXPR_ARG (arg, 0);
7649 tree arg01 = CALL_EXPR_ARG (arg, 1);
7650 if (tree_expr_nonnegative_p (arg00))
7651 {
7652 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7653 const REAL_VALUE_TYPE dconstroot
7654 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7655 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7656 build_real (type, dconstroot));
7657 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7658 }
7659 }
7660 }
7661 return NULL_TREE;
7662 }
7663
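/* Illustrative examples (an editor's sketch, not part of the original
   source): likewise, under -funsafe-math-optimizations,

     cbrt (exp (x))     ->  exp (x / 3)
     cbrt (sqrt (x))    ->  pow (x, 1.0/6.0)
     cbrt (cbrt (x))    ->  pow (x, 1.0/9.0)   iff x is nonnegative
     cbrt (pow (x, y))  ->  pow (x, y / 3)     iff x is nonnegative
  */
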
7664 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7665 TYPE is the type of the return value. Return NULL_TREE if no
7666 simplification can be made. */
7667
7668 static tree
7669 fold_builtin_cos (location_t loc,
7670 tree arg, tree type, tree fndecl)
7671 {
7672 tree res, narg;
7673
7674 if (!validate_arg (arg, REAL_TYPE))
7675 return NULL_TREE;
7676
7677 /* Calculate the result when the argument is a constant. */
7678 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7679 return res;
7680
7681 /* Optimize cos(-x) into cos (x). */
7682 if ((narg = fold_strip_sign_ops (arg)))
7683 return build_call_expr_loc (loc, fndecl, 1, narg);
7684
7685 return NULL_TREE;
7686 }
7687
7688 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7689 Return NULL_TREE if no simplification can be made. */
7690
7691 static tree
7692 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7693 {
7694 if (validate_arg (arg, REAL_TYPE))
7695 {
7696 tree res, narg;
7697
7698 /* Calculate the result when the argument is a constant. */
7699 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7700 return res;
7701
7702 /* Optimize cosh(-x) into cosh (x). */
7703 if ((narg = fold_strip_sign_ops (arg)))
7704 return build_call_expr_loc (loc, fndecl, 1, narg);
7705 }
7706
7707 return NULL_TREE;
7708 }
7709
7710 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7711 argument ARG. TYPE is the type of the return value. Return
7712 NULL_TREE if no simplification can be made. */
7713
7714 static tree
7715 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7716 bool hyper)
7717 {
7718 if (validate_arg (arg, COMPLEX_TYPE)
7719 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7720 {
7721 tree tmp;
7722
7723 /* Calculate the result when the argument is a constant. */
7724 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7725 return tmp;
7726
7727 /* Optimize fn(-x) into fn(x). */
7728 if ((tmp = fold_strip_sign_ops (arg)))
7729 return build_call_expr_loc (loc, fndecl, 1, tmp);
7730 }
7731
7732 return NULL_TREE;
7733 }
7734
7735 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7736 Return NULL_TREE if no simplification can be made. */
7737
7738 static tree
7739 fold_builtin_tan (tree arg, tree type)
7740 {
7741 enum built_in_function fcode;
7742 tree res;
7743
7744 if (!validate_arg (arg, REAL_TYPE))
7745 return NULL_TREE;
7746
7747 /* Calculate the result when the argument is a constant. */
7748 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7749 return res;
7750
7751 /* Optimize tan(atan(x)) = x. */
7752 fcode = builtin_mathfn_code (arg);
7753 if (flag_unsafe_math_optimizations
7754 && (fcode == BUILT_IN_ATAN
7755 || fcode == BUILT_IN_ATANF
7756 || fcode == BUILT_IN_ATANL))
7757 return CALL_EXPR_ARG (arg, 0);
7758
7759 return NULL_TREE;
7760 }
7761
7762 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7763 NULL_TREE if no simplification can be made. */
7764
7765 static tree
7766 fold_builtin_sincos (location_t loc,
7767 tree arg0, tree arg1, tree arg2)
7768 {
7769 tree type;
7770 tree res, fn, call;
7771
7772 if (!validate_arg (arg0, REAL_TYPE)
7773 || !validate_arg (arg1, POINTER_TYPE)
7774 || !validate_arg (arg2, POINTER_TYPE))
7775 return NULL_TREE;
7776
7777 type = TREE_TYPE (arg0);
7778
7779 /* Calculate the result when the argument is a constant. */
7780 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7781 return res;
7782
7783 /* Canonicalize sincos to cexpi. */
7784 if (!targetm.libc_has_function (function_c99_math_complex))
7785 return NULL_TREE;
7786 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7787 if (!fn)
7788 return NULL_TREE;
7789
7790 call = build_call_expr_loc (loc, fn, 1, arg0);
7791 call = builtin_save_expr (call);
7792
7793 return build2 (COMPOUND_EXPR, void_type_node,
7794 build2 (MODIFY_EXPR, void_type_node,
7795 build_fold_indirect_ref_loc (loc, arg1),
7796 build1 (IMAGPART_EXPR, type, call)),
7797 build2 (MODIFY_EXPR, void_type_node,
7798 build_fold_indirect_ref_loc (loc, arg2),
7799 build1 (REALPART_EXPR, type, call)));
7800 }
7801
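/* Illustrative example (an editor's sketch, not part of the original
   source): on targets whose libc provides the C99 complex functions,

     sincos (x, &s, &c);

   is rewritten above into roughly

     tmp = cexpi (x);  s = __imag__ tmp;  c = __real__ tmp;

   so a single cexpi call feeds both results.  */
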
7802 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7803 NULL_TREE if no simplification can be made. */
7804
7805 static tree
7806 fold_builtin_cexp (location_t loc, tree arg0, tree type)
7807 {
7808 tree rtype;
7809 tree realp, imagp, ifn;
7810 tree res;
7811
7812 if (!validate_arg (arg0, COMPLEX_TYPE)
7813 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
7814 return NULL_TREE;
7815
7816 /* Calculate the result when the argument is a constant. */
7817 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
7818 return res;
7819
7820 rtype = TREE_TYPE (TREE_TYPE (arg0));
7821
7822 /* If we can figure out the real part of arg0 and it is constant zero,
7823 fold to cexpi. */
7824 if (!targetm.libc_has_function (function_c99_math_complex))
7825 return NULL_TREE;
7826 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7827 if (!ifn)
7828 return NULL_TREE;
7829
7830 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
7831 && real_zerop (realp))
7832 {
7833 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
7834 return build_call_expr_loc (loc, ifn, 1, narg);
7835 }
7836
7837 /* If we can easily decompose the real and imaginary parts, split cexp
7838 into exp (r) * cexpi (i). */
7839 if (flag_unsafe_math_optimizations
7840 && realp)
7841 {
7842 tree rfn, rcall, icall;
7843
7844 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7845 if (!rfn)
7846 return NULL_TREE;
7847
7848 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
7849 if (!imagp)
7850 return NULL_TREE;
7851
7852 icall = build_call_expr_loc (loc, ifn, 1, imagp);
7853 icall = builtin_save_expr (icall);
7854 rcall = build_call_expr_loc (loc, rfn, 1, realp);
7855 rcall = builtin_save_expr (rcall);
7856 return fold_build2_loc (loc, COMPLEX_EXPR, type,
7857 fold_build2_loc (loc, MULT_EXPR, rtype,
7858 rcall,
7859 fold_build1_loc (loc, REALPART_EXPR,
7860 rtype, icall)),
7861 fold_build2_loc (loc, MULT_EXPR, rtype,
7862 rcall,
7863 fold_build1_loc (loc, IMAGPART_EXPR,
7864 rtype, icall)));
7865 }
7866
7867 return NULL_TREE;
7868 }
7869
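/* Illustrative examples (an editor's sketch, not part of the original
   source): writing z = r + i*I, the folds above give

     cexp (0 + i*I)  ->  cexpi (i)

   whenever cexpi is available, and under -funsafe-math-optimizations
   the general decomposition

     cexp (z)  ->  exp (r) * cexpi (i)

   built as a COMPLEX_EXPR from the two products.  */
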
7870 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7871 Return NULL_TREE if no simplification can be made. */
7872
7873 static tree
7874 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7875 {
7876 if (!validate_arg (arg, REAL_TYPE))
7877 return NULL_TREE;
7878
7879 /* Optimize trunc of constant value. */
7880 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7881 {
7882 REAL_VALUE_TYPE r, x;
7883 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7884
7885 x = TREE_REAL_CST (arg);
7886 real_trunc (&r, TYPE_MODE (type), &x);
7887 return build_real (type, r);
7888 }
7889
7890 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7891 }
7892
7893 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7894 Return NULL_TREE if no simplification can be made. */
7895
7896 static tree
7897 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7898 {
7899 if (!validate_arg (arg, REAL_TYPE))
7900 return NULL_TREE;
7901
7902 /* Optimize floor of constant value. */
7903 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7904 {
7905 REAL_VALUE_TYPE x;
7906
7907 x = TREE_REAL_CST (arg);
7908 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7909 {
7910 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7911 REAL_VALUE_TYPE r;
7912
7913 real_floor (&r, TYPE_MODE (type), &x);
7914 return build_real (type, r);
7915 }
7916 }
7917
7918 /* Fold floor (x) where x is nonnegative to trunc (x). */
7919 if (tree_expr_nonnegative_p (arg))
7920 {
7921 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7922 if (truncfn)
7923 return build_call_expr_loc (loc, truncfn, 1, arg);
7924 }
7925
7926 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7927 }
7928
7929 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7930 Return NULL_TREE if no simplification can be made. */
7931
7932 static tree
7933 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7934 {
7935 if (!validate_arg (arg, REAL_TYPE))
7936 return NULL_TREE;
7937
7938 /* Optimize ceil of constant value. */
7939 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7940 {
7941 REAL_VALUE_TYPE x;
7942
7943 x = TREE_REAL_CST (arg);
7944 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7945 {
7946 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7947 REAL_VALUE_TYPE r;
7948
7949 real_ceil (&r, TYPE_MODE (type), &x);
7950 return build_real (type, r);
7951 }
7952 }
7953
7954 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7955 }
7956
7957 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7958 Return NULL_TREE if no simplification can be made. */
7959
7960 static tree
7961 fold_builtin_round (location_t loc, tree fndecl, tree arg)
7962 {
7963 if (!validate_arg (arg, REAL_TYPE))
7964 return NULL_TREE;
7965
7966 /* Optimize round of constant value. */
7967 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7968 {
7969 REAL_VALUE_TYPE x;
7970
7971 x = TREE_REAL_CST (arg);
7972 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7973 {
7974 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7975 REAL_VALUE_TYPE r;
7976
7977 real_round (&r, TYPE_MODE (type), &x);
7978 return build_real (type, r);
7979 }
7980 }
7981
7982 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7983 }
7984
7985 /* Fold function call to builtin lround, lroundf or lroundl (or the
7986 corresponding long long versions) and other rounding functions. ARG
7987 is the argument to the call. Return NULL_TREE if no simplification
7988 can be made. */
7989
7990 static tree
7991 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
7992 {
7993 if (!validate_arg (arg, REAL_TYPE))
7994 return NULL_TREE;
7995
7996 /* Optimize lround of constant value. */
7997 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7998 {
7999 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
8000
8001 if (real_isfinite (&x))
8002 {
8003 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
8004 tree ftype = TREE_TYPE (arg);
8005 double_int val;
8006 REAL_VALUE_TYPE r;
8007
8008 switch (DECL_FUNCTION_CODE (fndecl))
8009 {
8010 CASE_FLT_FN (BUILT_IN_IFLOOR):
8011 CASE_FLT_FN (BUILT_IN_LFLOOR):
8012 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8013 real_floor (&r, TYPE_MODE (ftype), &x);
8014 break;
8015
8016 CASE_FLT_FN (BUILT_IN_ICEIL):
8017 CASE_FLT_FN (BUILT_IN_LCEIL):
8018 CASE_FLT_FN (BUILT_IN_LLCEIL):
8019 real_ceil (&r, TYPE_MODE (ftype), &x);
8020 break;
8021
8022 CASE_FLT_FN (BUILT_IN_IROUND):
8023 CASE_FLT_FN (BUILT_IN_LROUND):
8024 CASE_FLT_FN (BUILT_IN_LLROUND):
8025 real_round (&r, TYPE_MODE (ftype), &x);
8026 break;
8027
8028 default:
8029 gcc_unreachable ();
8030 }
8031
8032 real_to_integer2 ((HOST_WIDE_INT *)&val.low, &val.high, &r);
8033 if (double_int_fits_to_tree_p (itype, val))
8034 return double_int_to_tree (itype, val);
8035 }
8036 }
8037
8038 switch (DECL_FUNCTION_CODE (fndecl))
8039 {
8040 CASE_FLT_FN (BUILT_IN_LFLOOR):
8041 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8042 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8043 if (tree_expr_nonnegative_p (arg))
8044 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
8045 TREE_TYPE (TREE_TYPE (fndecl)), arg);
8046 break;
8047 default:;
8048 }
8049
8050 return fold_fixed_mathfn (loc, fndecl, arg);
8051 }
8052
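/* Illustrative examples (an editor's sketch, not part of the original
   source): the constant folding above gives

     lround (2.5)   ->  3
     lceil (-1.5)   ->  -1
     lfloor (-1.5)  ->  -2

   provided the value fits the integer return type; and lfloor (x)
   with x provably nonnegative becomes a plain FIX_TRUNC_EXPR.  */
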
8053 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8054 and their long and long long variants (e.g. ffsl and ffsll). ARG is
8055 the argument to the call. Return NULL_TREE if no simplification can
8056 be made. */
8057
8058 static tree
8059 fold_builtin_bitop (tree fndecl, tree arg)
8060 {
8061 if (!validate_arg (arg, INTEGER_TYPE))
8062 return NULL_TREE;
8063
8064 /* Optimize for constant argument. */
8065 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8066 {
8067 HOST_WIDE_INT hi, width, result;
8068 unsigned HOST_WIDE_INT lo;
8069 tree type;
8070
8071 type = TREE_TYPE (arg);
8072 width = TYPE_PRECISION (type);
8073 lo = TREE_INT_CST_LOW (arg);
8074
8075 /* Clear all the bits that are beyond the type's precision. */
8076 if (width > HOST_BITS_PER_WIDE_INT)
8077 {
8078 hi = TREE_INT_CST_HIGH (arg);
8079 if (width < HOST_BITS_PER_DOUBLE_INT)
8080 hi &= ~(HOST_WIDE_INT_M1U << (width - HOST_BITS_PER_WIDE_INT));
8081 }
8082 else
8083 {
8084 hi = 0;
8085 if (width < HOST_BITS_PER_WIDE_INT)
8086 lo &= ~(HOST_WIDE_INT_M1U << width);
8087 }
8088
8089 switch (DECL_FUNCTION_CODE (fndecl))
8090 {
8091 CASE_INT_FN (BUILT_IN_FFS):
8092 if (lo != 0)
8093 result = ffs_hwi (lo);
8094 else if (hi != 0)
8095 result = HOST_BITS_PER_WIDE_INT + ffs_hwi (hi);
8096 else
8097 result = 0;
8098 break;
8099
8100 CASE_INT_FN (BUILT_IN_CLZ):
8101 if (hi != 0)
8102 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
8103 else if (lo != 0)
8104 result = width - floor_log2 (lo) - 1;
8105 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8106 result = width;
8107 break;
8108
8109 CASE_INT_FN (BUILT_IN_CTZ):
8110 if (lo != 0)
8111 result = ctz_hwi (lo);
8112 else if (hi != 0)
8113 result = HOST_BITS_PER_WIDE_INT + ctz_hwi (hi);
8114 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8115 result = width;
8116 break;
8117
8118 CASE_INT_FN (BUILT_IN_CLRSB):
8119 if (width > 2 * HOST_BITS_PER_WIDE_INT)
8120 return NULL_TREE;
8121 if (width > HOST_BITS_PER_WIDE_INT
8122 && (hi & ((unsigned HOST_WIDE_INT) 1
8123 << (width - HOST_BITS_PER_WIDE_INT - 1))) != 0)
8124 {
8125 hi = ~hi & ~(HOST_WIDE_INT_M1U
8126 << (width - HOST_BITS_PER_WIDE_INT - 1));
8127 lo = ~lo;
8128 }
8129 else if (width <= HOST_BITS_PER_WIDE_INT
8130 && (lo & ((unsigned HOST_WIDE_INT) 1 << (width - 1))) != 0)
8131 lo = ~lo & ~(HOST_WIDE_INT_M1U << (width - 1));
8132 if (hi != 0)
8133 result = width - floor_log2 (hi) - 2 - HOST_BITS_PER_WIDE_INT;
8134 else if (lo != 0)
8135 result = width - floor_log2 (lo) - 2;
8136 else
8137 result = width - 1;
8138 break;
8139
8140 CASE_INT_FN (BUILT_IN_POPCOUNT):
8141 result = 0;
8142 while (lo)
8143 result++, lo &= lo - 1;
8144 while (hi)
8145 result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
8146 break;
8147
8148 CASE_INT_FN (BUILT_IN_PARITY):
8149 result = 0;
8150 while (lo)
8151 result++, lo &= lo - 1;
8152 while (hi)
8153 result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
8154 result &= 1;
8155 break;
8156
8157 default:
8158 gcc_unreachable ();
8159 }
8160
8161 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8162 }
8163
8164 return NULL_TREE;
8165 }
8166
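/* Illustrative examples (an editor's sketch, not part of the original
   source): for constant arguments (assuming a 32-bit int) the code
   above computes

     __builtin_ffs (8)          ->  4
     __builtin_clz (1)          ->  31
     __builtin_ctz (8)          ->  3
     __builtin_popcount (0xf0)  ->  4
     __builtin_parity (7)       ->  1
  */
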
8167 /* Fold function call to builtin_bswap and the short, long and long long
8168 variants. Return NULL_TREE if no simplification can be made. */
8169 static tree
8170 fold_builtin_bswap (tree fndecl, tree arg)
8171 {
8172 if (! validate_arg (arg, INTEGER_TYPE))
8173 return NULL_TREE;
8174
8175 /* Optimize constant value. */
8176 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8177 {
8178 HOST_WIDE_INT hi, width, r_hi = 0;
8179 unsigned HOST_WIDE_INT lo, r_lo = 0;
8180 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8181
8182 width = TYPE_PRECISION (type);
8183 lo = TREE_INT_CST_LOW (arg);
8184 hi = TREE_INT_CST_HIGH (arg);
8185
8186 switch (DECL_FUNCTION_CODE (fndecl))
8187 {
8188 case BUILT_IN_BSWAP16:
8189 case BUILT_IN_BSWAP32:
8190 case BUILT_IN_BSWAP64:
8191 {
8192 int s;
8193
8194 for (s = 0; s < width; s += 8)
8195 {
8196 int d = width - s - 8;
8197 unsigned HOST_WIDE_INT byte;
8198
8199 if (s < HOST_BITS_PER_WIDE_INT)
8200 byte = (lo >> s) & 0xff;
8201 else
8202 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
8203
8204 if (d < HOST_BITS_PER_WIDE_INT)
8205 r_lo |= byte << d;
8206 else
8207 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
8208 }
8209 }
8210
8211 break;
8212
8213 default:
8214 gcc_unreachable ();
8215 }
8216
8217 if (width < HOST_BITS_PER_WIDE_INT)
8218 return build_int_cst (type, r_lo);
8219 else
8220 return build_int_cst_wide (type, r_lo, r_hi);
8221 }
8222
8223 return NULL_TREE;
8224 }
8225
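/* Illustrative examples (an editor's sketch, not part of the original
   source): the byte loop above folds, e.g.,

     __builtin_bswap16 (0x1234)      ->  0x3412
     __builtin_bswap32 (0x12345678)  ->  0x78563412
  */
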
8226 /* A subroutine of fold_builtin to fold the various logarithmic
8227 functions. Return NULL_TREE if no simplification can be made.
8228 FUNC is the corresponding MPFR logarithm function. */
8229
8230 static tree
8231 fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
8232 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8233 {
8234 if (validate_arg (arg, REAL_TYPE))
8235 {
8236 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8237 tree res;
8238 const enum built_in_function fcode = builtin_mathfn_code (arg);
8239
8240 /* Calculate the result when the argument is a constant. */
8241 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
8242 return res;
8243
8244 /* Special case, optimize logN(expN(x)) = x. */
8245 if (flag_unsafe_math_optimizations
8246 && ((func == mpfr_log
8247 && (fcode == BUILT_IN_EXP
8248 || fcode == BUILT_IN_EXPF
8249 || fcode == BUILT_IN_EXPL))
8250 || (func == mpfr_log2
8251 && (fcode == BUILT_IN_EXP2
8252 || fcode == BUILT_IN_EXP2F
8253 || fcode == BUILT_IN_EXP2L))
8254 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
8255 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8256
8257 /* Optimize logN(func()) for various exponential functions. We
8258 want to determine the value "x" and the power "exponent" in
8259 order to transform logN(x**exponent) into exponent*logN(x). */
8260 if (flag_unsafe_math_optimizations)
8261 {
8262 tree exponent = 0, x = 0;
8263
8264 switch (fcode)
8265 {
8266 CASE_FLT_FN (BUILT_IN_EXP):
8267 /* Prepare to do logN(exp(exponent)) -> exponent*logN(e). */
8268 x = build_real (type, real_value_truncate (TYPE_MODE (type),
8269 dconst_e ()));
8270 exponent = CALL_EXPR_ARG (arg, 0);
8271 break;
8272 CASE_FLT_FN (BUILT_IN_EXP2):
8273 /* Prepare to do logN(exp2(exponent)) -> exponent*logN(2). */
8274 x = build_real (type, dconst2);
8275 exponent = CALL_EXPR_ARG (arg, 0);
8276 break;
8277 CASE_FLT_FN (BUILT_IN_EXP10):
8278 CASE_FLT_FN (BUILT_IN_POW10):
8279 /* Prepare to do logN(exp10(exponent)) -> exponent*logN(10). */
8280 {
8281 REAL_VALUE_TYPE dconst10;
8282 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
8283 x = build_real (type, dconst10);
8284 }
8285 exponent = CALL_EXPR_ARG (arg, 0);
8286 break;
8287 CASE_FLT_FN (BUILT_IN_SQRT):
8288 /* Prepare to do logN(sqrt(x)) -> 0.5*logN(x). */
8289 x = CALL_EXPR_ARG (arg, 0);
8290 exponent = build_real (type, dconsthalf);
8291 break;
8292 CASE_FLT_FN (BUILT_IN_CBRT):
8293 /* Prepare to do logN(cbrt(x)) -> (1/3)*logN(x). */
8294 x = CALL_EXPR_ARG (arg, 0);
8295 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
8296 dconst_third ()));
8297 break;
8298 CASE_FLT_FN (BUILT_IN_POW):
8299 /* Prepare to do logN(pow(x,exponent)) -> exponent*logN(x). */
8300 x = CALL_EXPR_ARG (arg, 0);
8301 exponent = CALL_EXPR_ARG (arg, 1);
8302 break;
8303 default:
8304 break;
8305 }
8306
8307 /* Now perform the optimization. */
8308 if (x && exponent)
8309 {
8310 tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
8311 return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
8312 }
8313 }
8314 }
8315
8316 return NULL_TREE;
8317 }
8318
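/* Illustrative examples (an editor's sketch, not part of the original
   source): under -funsafe-math-optimizations the exponent extraction
   above performs

     log (exp (x))     ->  x
     log2 (exp2 (x))   ->  x
     log (sqrt (x))    ->  0.5 * log (x)
     log (pow (x, y))  ->  y * log (x)
  */
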
8319 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8320 NULL_TREE if no simplification can be made. */
8321
8322 static tree
8323 fold_builtin_hypot (location_t loc, tree fndecl,
8324 tree arg0, tree arg1, tree type)
8325 {
8326 tree res, narg0, narg1;
8327
8328 if (!validate_arg (arg0, REAL_TYPE)
8329 || !validate_arg (arg1, REAL_TYPE))
8330 return NULL_TREE;
8331
8332 /* Calculate the result when the argument is a constant. */
8333 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8334 return res;
8335
8336 /* If either argument to hypot has a negate or abs, strip that off.
8337 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8338 narg0 = fold_strip_sign_ops (arg0);
8339 narg1 = fold_strip_sign_ops (arg1);
8340 if (narg0 || narg1)
8341 {
8342 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
8343 narg1 ? narg1 : arg1);
8344 }
8345
8346 /* If either argument is zero, hypot is fabs of the other. */
8347 if (real_zerop (arg0))
8348 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
8349 else if (real_zerop (arg1))
8350 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
8351
8352 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8353 if (flag_unsafe_math_optimizations
8354 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8355 {
8356 const REAL_VALUE_TYPE sqrt2_trunc
8357 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8358 return fold_build2_loc (loc, MULT_EXPR, type,
8359 fold_build1_loc (loc, ABS_EXPR, type, arg0),
8360 build_real (type, sqrt2_trunc));
8361 }
8362
8363 return NULL_TREE;
8364 }
8365
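/* Illustrative examples (an editor's sketch, not part of the original
   source): the folds above give

     hypot (-x, fabs (y))  ->  hypot (x, y)
     hypot (x, 0.0)        ->  fabs (x)
     hypot (x, x)          ->  fabs (x) * sqrt (2)   (unsafe math only)
  */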
8366
8367 /* Fold a builtin function call to pow, powf, or powl. Return
8368 NULL_TREE if no simplification can be made. */
8369 static tree
8370 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
8371 {
8372 tree res;
8373
8374 if (!validate_arg (arg0, REAL_TYPE)
8375 || !validate_arg (arg1, REAL_TYPE))
8376 return NULL_TREE;
8377
8378 /* Calculate the result when the argument is a constant. */
8379 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8380 return res;
8381
8382 /* Optimize pow(1.0,y) = 1.0. */
8383 if (real_onep (arg0))
8384 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8385
8386 if (TREE_CODE (arg1) == REAL_CST
8387 && !TREE_OVERFLOW (arg1))
8388 {
8389 REAL_VALUE_TYPE cint;
8390 REAL_VALUE_TYPE c;
8391 HOST_WIDE_INT n;
8392
8393 c = TREE_REAL_CST (arg1);
8394
8395 /* Optimize pow(x,0.0) = 1.0. */
8396 if (REAL_VALUES_EQUAL (c, dconst0))
8397 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8398 arg0);
8399
8400 /* Optimize pow(x,1.0) = x. */
8401 if (REAL_VALUES_EQUAL (c, dconst1))
8402 return arg0;
8403
8404 /* Optimize pow(x,-1.0) = 1.0/x. */
8405 if (REAL_VALUES_EQUAL (c, dconstm1))
8406 return fold_build2_loc (loc, RDIV_EXPR, type,
8407 build_real (type, dconst1), arg0);
8408
8409 /* Optimize pow(x,0.5) = sqrt(x). */
8410 if (flag_unsafe_math_optimizations
8411 && REAL_VALUES_EQUAL (c, dconsthalf))
8412 {
8413 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8414
8415 if (sqrtfn != NULL_TREE)
8416 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
8417 }
8418
8419 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8420 if (flag_unsafe_math_optimizations)
8421 {
8422 const REAL_VALUE_TYPE dconstroot
8423 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8424
8425 if (REAL_VALUES_EQUAL (c, dconstroot))
8426 {
8427 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8428 if (cbrtfn != NULL_TREE)
8429 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
8430 }
8431 }
8432
8433 /* Check for an integer exponent. */
8434 n = real_to_integer (&c);
8435 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
8436 if (real_identical (&c, &cint))
8437 {
8438 /* Attempt to evaluate pow at compile-time, unless this should
8439 raise an exception. */
8440 if (TREE_CODE (arg0) == REAL_CST
8441 && !TREE_OVERFLOW (arg0)
8442 && (n > 0
8443 || (!flag_trapping_math && !flag_errno_math)
8444 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
8445 {
8446 REAL_VALUE_TYPE x;
8447 bool inexact;
8448
8449 x = TREE_REAL_CST (arg0);
8450 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8451 if (flag_unsafe_math_optimizations || !inexact)
8452 return build_real (type, x);
8453 }
8454
8455 /* Strip sign ops from even integer powers. */
8456 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8457 {
8458 tree narg0 = fold_strip_sign_ops (arg0);
8459 if (narg0)
8460 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
8461 }
8462 }
8463 }
8464
8465 if (flag_unsafe_math_optimizations)
8466 {
8467 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8468
8469 /* Optimize pow(expN(x),y) = expN(x*y). */
8470 if (BUILTIN_EXPONENT_P (fcode))
8471 {
8472 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8473 tree arg = CALL_EXPR_ARG (arg0, 0);
8474 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
8475 return build_call_expr_loc (loc, expfn, 1, arg);
8476 }
8477
8478 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8479 if (BUILTIN_SQRT_P (fcode))
8480 {
8481 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8482 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8483 build_real (type, dconsthalf));
8484 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
8485 }
8486
8487 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8488 if (BUILTIN_CBRT_P (fcode))
8489 {
8490 tree arg = CALL_EXPR_ARG (arg0, 0);
8491 if (tree_expr_nonnegative_p (arg))
8492 {
8493 const REAL_VALUE_TYPE dconstroot
8494 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8495 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8496 build_real (type, dconstroot));
8497 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
8498 }
8499 }
8500
8501 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8502 if (fcode == BUILT_IN_POW
8503 || fcode == BUILT_IN_POWF
8504 || fcode == BUILT_IN_POWL)
8505 {
8506 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8507 if (tree_expr_nonnegative_p (arg00))
8508 {
8509 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8510 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
8511 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
8512 }
8513 }
8514 }
8515
8516 return NULL_TREE;
8517 }
8518
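/* Illustrative examples (an editor's sketch, not part of the original
   source): among the rules above,

     pow (x, 0.0)     ->  1.0
     pow (x, 1.0)     ->  x
     pow (x, -1.0)    ->  1.0 / x
     pow (x, 0.5)     ->  sqrt (x)     (unsafe math only)
     pow (2.0, 10.0)  ->  1024.0       (exact, folded at compile time)
  */
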
8519 /* Fold a builtin function call to powi, powif, or powil with arguments ARG0 and ARG1.
8520 Return NULL_TREE if no simplification can be made. */
8521 static tree
8522 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8523 tree arg0, tree arg1, tree type)
8524 {
8525 if (!validate_arg (arg0, REAL_TYPE)
8526 || !validate_arg (arg1, INTEGER_TYPE))
8527 return NULL_TREE;
8528
8529 /* Optimize pow(1.0,y) = 1.0. */
8530 if (real_onep (arg0))
8531 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8532
8533 if (tree_fits_shwi_p (arg1))
8534 {
8535 HOST_WIDE_INT c = tree_to_shwi (arg1);
8536
8537 /* Evaluate powi at compile-time. */
8538 if (TREE_CODE (arg0) == REAL_CST
8539 && !TREE_OVERFLOW (arg0))
8540 {
8541 REAL_VALUE_TYPE x;
8542 x = TREE_REAL_CST (arg0);
8543 real_powi (&x, TYPE_MODE (type), &x, c);
8544 return build_real (type, x);
8545 }
8546
8547 /* Optimize pow(x,0) = 1.0. */
8548 if (c == 0)
8549 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8550 arg0);
8551
8552 /* Optimize pow(x,1) = x. */
8553 if (c == 1)
8554 return arg0;
8555
8556 /* Optimize pow(x,-1) = 1.0/x. */
8557 if (c == -1)
8558 return fold_build2_loc (loc, RDIV_EXPR, type,
8559 build_real (type, dconst1), arg0);
8560 }
8561
8562 return NULL_TREE;
8563 }
8564
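/* Illustrative examples (an editor's sketch, not part of the original
   source): __builtin_powi folds analogously for a literal integer
   exponent:

     __builtin_powi (x, 0)    ->  1.0
     __builtin_powi (x, -1)   ->  1.0 / x
     __builtin_powi (2.0, 8)  ->  256.0
  */
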
8565 /* A subroutine of fold_builtin to fold the various exponent
8566 functions. Return NULL_TREE if no simplification can be made.
8567 FUNC is the corresponding MPFR exponent function. */
8568
8569 static tree
8570 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8571 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8572 {
8573 if (validate_arg (arg, REAL_TYPE))
8574 {
8575 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8576 tree res;
8577
8578 /* Calculate the result when the argument is a constant. */
8579 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8580 return res;
8581
8582 /* Optimize expN(logN(x)) = x. */
8583 if (flag_unsafe_math_optimizations)
8584 {
8585 const enum built_in_function fcode = builtin_mathfn_code (arg);
8586
8587 if ((func == mpfr_exp
8588 && (fcode == BUILT_IN_LOG
8589 || fcode == BUILT_IN_LOGF
8590 || fcode == BUILT_IN_LOGL))
8591 || (func == mpfr_exp2
8592 && (fcode == BUILT_IN_LOG2
8593 || fcode == BUILT_IN_LOG2F
8594 || fcode == BUILT_IN_LOG2L))
8595 || (func == mpfr_exp10
8596 && (fcode == BUILT_IN_LOG10
8597 || fcode == BUILT_IN_LOG10F
8598 || fcode == BUILT_IN_LOG10L)))
8599 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8600 }
8601 }
8602
8603 return NULL_TREE;
8604 }
8605
8606 /* Return true if VAR is a VAR_DECL or a component thereof. */
8607
8608 static bool
8609 var_decl_component_p (tree var)
8610 {
8611 tree inner = var;
8612 while (handled_component_p (inner))
8613 inner = TREE_OPERAND (inner, 0);
8614 return SSA_VAR_P (inner);
8615 }
8616
8617 /* Fold function call to builtin memset. Return
8618 NULL_TREE if no simplification can be made. */
8619
8620 static tree
8621 fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
8622 tree type, bool ignore)
8623 {
8624 tree var, ret, etype;
8625 unsigned HOST_WIDE_INT length, cval;
8626
8627 if (! validate_arg (dest, POINTER_TYPE)
8628 || ! validate_arg (c, INTEGER_TYPE)
8629 || ! validate_arg (len, INTEGER_TYPE))
8630 return NULL_TREE;
8631
8632 if (! tree_fits_uhwi_p (len))
8633 return NULL_TREE;
8634
8635 /* If the LEN parameter is zero, return DEST. */
8636 if (integer_zerop (len))
8637 return omit_one_operand_loc (loc, type, dest, c);
8638
8639 if (TREE_CODE (c) != INTEGER_CST || TREE_SIDE_EFFECTS (dest))
8640 return NULL_TREE;
8641
8642 var = dest;
8643 STRIP_NOPS (var);
8644 if (TREE_CODE (var) != ADDR_EXPR)
8645 return NULL_TREE;
8646
8647 var = TREE_OPERAND (var, 0);
8648 if (TREE_THIS_VOLATILE (var))
8649 return NULL_TREE;
8650
8651 etype = TREE_TYPE (var);
8652 if (TREE_CODE (etype) == ARRAY_TYPE)
8653 etype = TREE_TYPE (etype);
8654
8655 if (!INTEGRAL_TYPE_P (etype)
8656 && !POINTER_TYPE_P (etype))
8657 return NULL_TREE;
8658
8659 if (! var_decl_component_p (var))
8660 return NULL_TREE;
8661
8662 length = tree_to_uhwi (len);
8663 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
8664 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
8665 return NULL_TREE;
8666
8667 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8668 return NULL_TREE;
8669
8670 if (integer_zerop (c))
8671 cval = 0;
8672 else
8673 {
8674 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8675 return NULL_TREE;
8676
8677 cval = TREE_INT_CST_LOW (c);
8678 cval &= 0xff;
8679 cval |= cval << 8;
8680 cval |= cval << 16;
8681 cval |= (cval << 31) << 1;
8682 }
8683
8684 ret = build_int_cst_type (etype, cval);
8685 var = build_fold_indirect_ref_loc (loc,
8686 fold_convert_loc (loc,
8687 build_pointer_type (etype),
8688 dest));
8689 ret = build2 (MODIFY_EXPR, etype, var, ret);
8690 if (ignore)
8691 return ret;
8692
8693 return omit_one_operand_loc (loc, type, dest, ret);
8694 }
8695
8696 /* Fold function call to builtin bzero. Return
8697 NULL_TREE if no simplification can be made. */
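
/* A minimal sketch, assuming "int i;": __builtin_bzero (&i, sizeof i)
   is folded through fold_builtin_memset to the plain store "i = 0;". */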
8698
8699 static tree
8700 fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
8701 {
8702 if (! validate_arg (dest, POINTER_TYPE)
8703 || ! validate_arg (size, INTEGER_TYPE))
8704 return NULL_TREE;
8705
8706 if (!ignore)
8707 return NULL_TREE;
8708
8709 /* New argument list transforming bzero(ptr x, int y) to
8710 memset(ptr x, int 0, size_t y). Folding this way, without
8711 building an actual memset call, means that if it isn't
8712 expanded inline we fall back to calling bzero, not memset. */
8713
8714 return fold_builtin_memset (loc, dest, integer_zero_node,
8715 fold_convert_loc (loc, size_type_node, size),
8716 void_type_node, ignore);
8717 }
8718
8719 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8720 NULL_TREE if no simplification can be made.
8721 If ENDP is 0, return DEST (like memcpy).
8722 If ENDP is 1, return DEST+LEN (like mempcpy).
8723 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8724 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8725 (memmove). */
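
/* A minimal sketch, assuming "int a, b;": the call
   __builtin_memcpy (&a, &b, sizeof (int)) is folded to the scalar
   assignment "a = b", and when the result is used the return value
   is rebuilt from DEST and ENDP afterwards. */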
8726
8727 static tree
8728 fold_builtin_memory_op (location_t loc, tree dest, tree src,
8729 tree len, tree type, bool ignore, int endp)
8730 {
8731 tree destvar, srcvar, expr;
8732
8733 if (! validate_arg (dest, POINTER_TYPE)
8734 || ! validate_arg (src, POINTER_TYPE)
8735 || ! validate_arg (len, INTEGER_TYPE))
8736 return NULL_TREE;
8737
8738 /* If the LEN parameter is zero, return DEST. */
8739 if (integer_zerop (len))
8740 return omit_one_operand_loc (loc, type, dest, src);
8741
8742 /* If SRC and DEST are the same (and not volatile), return
8743 DEST{,+LEN,+LEN-1}. */
8744 if (operand_equal_p (src, dest, 0))
8745 expr = len;
8746 else
8747 {
8748 tree srctype, desttype;
8749 unsigned int src_align, dest_align;
8750 tree off0;
8751
8752 if (endp == 3)
8753 {
8754 src_align = get_pointer_alignment (src);
8755 dest_align = get_pointer_alignment (dest);
8756
8757 /* Both DEST and SRC must be pointer types.
8758 ??? This is what old code did. Is the testing for pointer types
8759 really mandatory?
8760
8761 If either SRC is readonly or length is 1, we can use memcpy. */
8762 if (!dest_align || !src_align)
8763 return NULL_TREE;
8764 if (readonly_data_expr (src)
8765 || (tree_fits_uhwi_p (len)
8766 && (MIN (src_align, dest_align) / BITS_PER_UNIT
8767 >= tree_to_uhwi (len))))
8768 {
8769 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8770 if (!fn)
8771 return NULL_TREE;
8772 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8773 }
8774
8775 /* If *src and *dest can't overlap, optimize into memcpy as well. */
8776 if (TREE_CODE (src) == ADDR_EXPR
8777 && TREE_CODE (dest) == ADDR_EXPR)
8778 {
8779 tree src_base, dest_base, fn;
8780 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
8781 HOST_WIDE_INT size = -1;
8782 HOST_WIDE_INT maxsize = -1;
8783
8784 srcvar = TREE_OPERAND (src, 0);
8785 src_base = get_ref_base_and_extent (srcvar, &src_offset,
8786 &size, &maxsize);
8787 destvar = TREE_OPERAND (dest, 0);
8788 dest_base = get_ref_base_and_extent (destvar, &dest_offset,
8789 &size, &maxsize);
8790 if (tree_fits_uhwi_p (len))
8791 maxsize = tree_to_uhwi (len);
8792 else
8793 maxsize = -1;
8794 src_offset /= BITS_PER_UNIT;
8795 dest_offset /= BITS_PER_UNIT;
8796 if (SSA_VAR_P (src_base)
8797 && SSA_VAR_P (dest_base))
8798 {
8799 if (operand_equal_p (src_base, dest_base, 0)
8800 && ranges_overlap_p (src_offset, maxsize,
8801 dest_offset, maxsize))
8802 return NULL_TREE;
8803 }
8804 else if (TREE_CODE (src_base) == MEM_REF
8805 && TREE_CODE (dest_base) == MEM_REF)
8806 {
8807 double_int off;
8808 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
8809 TREE_OPERAND (dest_base, 0), 0))
8810 return NULL_TREE;
8811 off = mem_ref_offset (src_base) +
8812 double_int::from_shwi (src_offset);
8813 if (!off.fits_shwi ())
8814 return NULL_TREE;
8815 src_offset = off.low;
8816 off = mem_ref_offset (dest_base) +
8817 double_int::from_shwi (dest_offset);
8818 if (!off.fits_shwi ())
8819 return NULL_TREE;
8820 dest_offset = off.low;
8821 if (ranges_overlap_p (src_offset, maxsize,
8822 dest_offset, maxsize))
8823 return NULL_TREE;
8824 }
8825 else
8826 return NULL_TREE;
8827
8828 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8829 if (!fn)
8830 return NULL_TREE;
8831 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8832 }
8833
8834 /* If the destination and source do not alias, optimize into
8835 memcpy as well. */
8836 if ((is_gimple_min_invariant (dest)
8837 || TREE_CODE (dest) == SSA_NAME)
8838 && (is_gimple_min_invariant (src)
8839 || TREE_CODE (src) == SSA_NAME))
8840 {
8841 ao_ref destr, srcr;
8842 ao_ref_init_from_ptr_and_size (&destr, dest, len);
8843 ao_ref_init_from_ptr_and_size (&srcr, src, len);
8844 if (!refs_may_alias_p_1 (&destr, &srcr, false))
8845 {
8846 tree fn;
8847 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8848 if (!fn)
8849 return NULL_TREE;
8850 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8851 }
8852 }
8853
8854 return NULL_TREE;
8855 }
8856
8857 if (!tree_fits_shwi_p (len))
8858 return NULL_TREE;
8859 /* FIXME:
8860 This logic loses for arguments like (type *)malloc (sizeof (type)),
8861 since we strip the casts from the VOID return value of malloc.
8862 Perhaps we ought to inherit the type from a non-VOID argument here? */
8863 STRIP_NOPS (src);
8864 STRIP_NOPS (dest);
8865 if (!POINTER_TYPE_P (TREE_TYPE (src))
8866 || !POINTER_TYPE_P (TREE_TYPE (dest)))
8867 return NULL_TREE;
8868 /* As we fold (void *)(p + CST) to (void *)p + CST, undo this here. */
8869 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
8870 {
8871 tree tem = TREE_OPERAND (src, 0);
8872 STRIP_NOPS (tem);
8873 if (tem != TREE_OPERAND (src, 0))
8874 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
8875 }
8876 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
8877 {
8878 tree tem = TREE_OPERAND (dest, 0);
8879 STRIP_NOPS (tem);
8880 if (tem != TREE_OPERAND (dest, 0))
8881 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
8882 }
8883 srctype = TREE_TYPE (TREE_TYPE (src));
8884 if (TREE_CODE (srctype) == ARRAY_TYPE
8885 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8886 {
8887 srctype = TREE_TYPE (srctype);
8888 STRIP_NOPS (src);
8889 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
8890 }
8891 desttype = TREE_TYPE (TREE_TYPE (dest));
8892 if (TREE_CODE (desttype) == ARRAY_TYPE
8893 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8894 {
8895 desttype = TREE_TYPE (desttype);
8896 STRIP_NOPS (dest);
8897 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
8898 }
8899 if (TREE_ADDRESSABLE (srctype)
8900 || TREE_ADDRESSABLE (desttype))
8901 return NULL_TREE;
8902
8903 src_align = get_pointer_alignment (src);
8904 dest_align = get_pointer_alignment (dest);
8905 if (dest_align < TYPE_ALIGN (desttype)
8906 || src_align < TYPE_ALIGN (srctype))
8907 return NULL_TREE;
8908
8909 if (!ignore)
8910 dest = builtin_save_expr (dest);
8911
8912 /* Build accesses at offset zero with a ref-all character type. */
8913 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
8914 ptr_mode, true), 0);
8915
8916 /* For -fsanitize={bool,enum} make sure the load isn't performed in
8917 the bool or enum type. */
8918 if (((flag_sanitize & SANITIZE_BOOL)
8919 && TREE_CODE (desttype) == BOOLEAN_TYPE)
8920 || ((flag_sanitize & SANITIZE_ENUM)
8921 && TREE_CODE (desttype) == ENUMERAL_TYPE))
8922 {
8923 tree destitype
8924 = lang_hooks.types.type_for_mode (TYPE_MODE (desttype),
8925 TYPE_UNSIGNED (desttype));
8926 desttype = build_aligned_type (destitype, TYPE_ALIGN (desttype));
8927 }
8928 if (((flag_sanitize & SANITIZE_BOOL)
8929 && TREE_CODE (srctype) == BOOLEAN_TYPE)
8930 || ((flag_sanitize & SANITIZE_ENUM)
8931 && TREE_CODE (srctype) == ENUMERAL_TYPE))
8932 {
8933 tree srcitype
8934 = lang_hooks.types.type_for_mode (TYPE_MODE (srctype),
8935 TYPE_UNSIGNED (srctype));
8936 srctype = build_aligned_type (srcitype, TYPE_ALIGN (srctype));
8937 }
8938
8939 destvar = dest;
8940 STRIP_NOPS (destvar);
8941 if (TREE_CODE (destvar) == ADDR_EXPR
8942 && var_decl_component_p (TREE_OPERAND (destvar, 0))
8943 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8944 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
8945 else
8946 destvar = NULL_TREE;
8947
8948 srcvar = src;
8949 STRIP_NOPS (srcvar);
8950 if (TREE_CODE (srcvar) == ADDR_EXPR
8951 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
8952 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8953 {
8954 if (!destvar
8955 || src_align >= TYPE_ALIGN (desttype))
8956 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
8957 srcvar, off0);
8958 else if (!STRICT_ALIGNMENT)
8959 {
8960 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
8961 src_align);
8962 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
8963 }
8964 else
8965 srcvar = NULL_TREE;
8966 }
8967 else
8968 srcvar = NULL_TREE;
8969
8970 if (srcvar == NULL_TREE && destvar == NULL_TREE)
8971 return NULL_TREE;
8972
8973 if (srcvar == NULL_TREE)
8974 {
8975 STRIP_NOPS (src);
8976 if (src_align >= TYPE_ALIGN (desttype))
8977 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
8978 else
8979 {
8980 if (STRICT_ALIGNMENT)
8981 return NULL_TREE;
8982 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
8983 src_align);
8984 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
8985 }
8986 }
8987 else if (destvar == NULL_TREE)
8988 {
8989 STRIP_NOPS (dest);
8990 if (dest_align >= TYPE_ALIGN (srctype))
8991 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
8992 else
8993 {
8994 if (STRICT_ALIGNMENT)
8995 return NULL_TREE;
8996 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
8997 dest_align);
8998 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
8999 }
9000 }
9001
9002 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, srcvar);
9003 }
9004
9005 if (ignore)
9006 return expr;
9007
9008 if (endp == 0 || endp == 3)
9009 return omit_one_operand_loc (loc, type, dest, expr);
9010
9011 if (expr == len)
9012 expr = NULL_TREE;
9013
9014 if (endp == 2)
9015 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
9016 ssize_int (1));
9017
9018 dest = fold_build_pointer_plus_loc (loc, dest, len);
9019 dest = fold_convert_loc (loc, type, dest);
9020 if (expr)
9021 dest = omit_one_operand_loc (loc, type, dest, expr);
9022 return dest;
9023 }
9024
9025 /* Fold function call to builtin strcpy with arguments DEST and SRC.
9026 If LEN is not NULL, it represents the length of the string to be
9027 copied. Return NULL_TREE if no simplification can be made. */
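
/* A minimal sketch: with a constant source, strcpy (d, "abc")
   becomes memcpy (d, "abc", 4), the length being strlen ("abc") + 1. */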
9028
9029 tree
9030 fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
9031 {
9032 tree fn;
9033
9034 if (!validate_arg (dest, POINTER_TYPE)
9035 || !validate_arg (src, POINTER_TYPE))
9036 return NULL_TREE;
9037
9038 /* If SRC and DEST are the same (and not volatile), return DEST. */
9039 if (operand_equal_p (src, dest, 0))
9040 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
9041
9042 if (optimize_function_for_size_p (cfun))
9043 return NULL_TREE;
9044
9045 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
9046 if (!fn)
9047 return NULL_TREE;
9048
9049 if (!len)
9050 {
9051 len = c_strlen (src, 1);
9052 if (! len || TREE_SIDE_EFFECTS (len))
9053 return NULL_TREE;
9054 }
9055
9056 len = fold_convert_loc (loc, size_type_node, len);
9057 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
9058 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
9059 build_call_expr_loc (loc, fn, 3, dest, src, len));
9060 }
9061
9062 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
9063 Return NULL_TREE if no simplification can be made. */
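
/* A minimal sketch: stpcpy (d, "abc") becomes
   (memcpy (d, "abc", 4), d + 3), i.e. the copy followed by a
   pointer to the terminating NUL. */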
9064
9065 static tree
9066 fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
9067 {
9068 tree fn, len, lenp1, call, type;
9069
9070 if (!validate_arg (dest, POINTER_TYPE)
9071 || !validate_arg (src, POINTER_TYPE))
9072 return NULL_TREE;
9073
9074 len = c_strlen (src, 1);
9075 if (!len
9076 || TREE_CODE (len) != INTEGER_CST)
9077 return NULL_TREE;
9078
9079 if (optimize_function_for_size_p (cfun)
9080 /* If length is zero it's small enough. */
9081 && !integer_zerop (len))
9082 return NULL_TREE;
9083
9084 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
9085 if (!fn)
9086 return NULL_TREE;
9087
9088 lenp1 = size_binop_loc (loc, PLUS_EXPR,
9089 fold_convert_loc (loc, size_type_node, len),
9090 build_int_cst (size_type_node, 1));
9091 /* We use dest twice in building our expression. Save it from
9092 multiple expansions. */
9093 dest = builtin_save_expr (dest);
9094 call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);
9095
9096 type = TREE_TYPE (TREE_TYPE (fndecl));
9097 dest = fold_build_pointer_plus_loc (loc, dest, len);
9098 dest = fold_convert_loc (loc, type, dest);
9099 dest = omit_one_operand_loc (loc, type, dest, call);
9100 return dest;
9101 }
9102
9103 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
9104 If SLEN is not NULL, it represents the length of the source string.
9105 Return NULL_TREE if no simplification can be made. */
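
/* A minimal sketch: strncpy (d, "abc", 3) becomes
   memcpy (d, "abc", 3), while strncpy (d, "abc", 8) is left alone
   because the trailing zero padding is not modelled here. */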
9106
9107 tree
9108 fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
9109 tree src, tree len, tree slen)
9110 {
9111 tree fn;
9112
9113 if (!validate_arg (dest, POINTER_TYPE)
9114 || !validate_arg (src, POINTER_TYPE)
9115 || !validate_arg (len, INTEGER_TYPE))
9116 return NULL_TREE;
9117
9118 /* If the LEN parameter is zero, return DEST. */
9119 if (integer_zerop (len))
9120 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
9121
9122 /* We can't compare slen with len as constants below if len is not a
9123 constant. */
9124 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
9125 return NULL_TREE;
9126
9127 if (!slen)
9128 slen = c_strlen (src, 1);
9129
9130 /* Now the source must be a string whose length is a known constant. */
9131 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
9132 return NULL_TREE;
9133
9134 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
9135
9136 /* We do not support simplification of this case, though we do
9137 support it when expanding trees into RTL. */
9138 /* FIXME: generate a call to __builtin_memset. */
9139 if (tree_int_cst_lt (slen, len))
9140 return NULL_TREE;
9141
9142 /* OK transform into builtin memcpy. */
9143 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
9144 if (!fn)
9145 return NULL_TREE;
9146
9147 len = fold_convert_loc (loc, size_type_node, len);
9148 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
9149 build_call_expr_loc (loc, fn, 3, dest, src, len));
9150 }
9151
9152 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
9153 arguments to the call, and TYPE is its return type.
9154 Return NULL_TREE if no simplification can be made. */
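
/* A minimal sketch: memchr ("hello", 'l', 5) is evaluated at
   compile time to &"hello"[2], while a miss within LEN yields a
   null pointer constant. */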
9155
9156 static tree
9157 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
9158 {
9159 if (!validate_arg (arg1, POINTER_TYPE)
9160 || !validate_arg (arg2, INTEGER_TYPE)
9161 || !validate_arg (len, INTEGER_TYPE))
9162 return NULL_TREE;
9163 else
9164 {
9165 const char *p1;
9166
9167 if (TREE_CODE (arg2) != INTEGER_CST
9168 || !tree_fits_uhwi_p (len))
9169 return NULL_TREE;
9170
9171 p1 = c_getstr (arg1);
9172 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
9173 {
9174 char c;
9175 const char *r;
9176 tree tem;
9177
9178 if (target_char_cast (arg2, &c))
9179 return NULL_TREE;
9180
9181 r = (const char *) memchr (p1, c, tree_to_uhwi (len));
9182
9183 if (r == NULL)
9184 return build_int_cst (TREE_TYPE (arg1), 0);
9185
9186 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
9187 return fold_convert_loc (loc, type, tem);
9188 }
9189 return NULL_TREE;
9190 }
9191 }
9192
9193 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
9194 Return NULL_TREE if no simplification can be made. */
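
/* A minimal sketch: memcmp ("ab", "ac", 2) folds to -1 at compile
   time, and memcmp (p, q, 1) becomes the byte difference
   *(const unsigned char *) p - *(const unsigned char *) q. */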
9195
9196 static tree
9197 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
9198 {
9199 const char *p1, *p2;
9200
9201 if (!validate_arg (arg1, POINTER_TYPE)
9202 || !validate_arg (arg2, POINTER_TYPE)
9203 || !validate_arg (len, INTEGER_TYPE))
9204 return NULL_TREE;
9205
9206 /* If the LEN parameter is zero, return zero. */
9207 if (integer_zerop (len))
9208 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
9209 arg1, arg2);
9210
9211 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9212 if (operand_equal_p (arg1, arg2, 0))
9213 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
9214
9215 p1 = c_getstr (arg1);
9216 p2 = c_getstr (arg2);
9217
9218 /* If all arguments are constant, and the value of len is not greater
9219 than the lengths of arg1 and arg2, evaluate at compile-time. */
9220 if (tree_fits_uhwi_p (len) && p1 && p2
9221 && compare_tree_int (len, strlen (p1) + 1) <= 0
9222 && compare_tree_int (len, strlen (p2) + 1) <= 0)
9223 {
9224 const int r = memcmp (p1, p2, tree_to_uhwi (len));
9225
9226 if (r > 0)
9227 return integer_one_node;
9228 else if (r < 0)
9229 return integer_minus_one_node;
9230 else
9231 return integer_zero_node;
9232 }
9233
9234 /* If the len parameter is one, return an expression corresponding to
9235 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
9236 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
9237 {
9238 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9239 tree cst_uchar_ptr_node
9240 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9241
9242 tree ind1
9243 = fold_convert_loc (loc, integer_type_node,
9244 build1 (INDIRECT_REF, cst_uchar_node,
9245 fold_convert_loc (loc,
9246 cst_uchar_ptr_node,
9247 arg1)));
9248 tree ind2
9249 = fold_convert_loc (loc, integer_type_node,
9250 build1 (INDIRECT_REF, cst_uchar_node,
9251 fold_convert_loc (loc,
9252 cst_uchar_ptr_node,
9253 arg2)));
9254 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9255 }
9256
9257 return NULL_TREE;
9258 }
9259
9260 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
9261 Return NULL_TREE if no simplification can be made. */
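
/* A minimal sketch: strcmp ("a", "b") folds to -1 at compile time,
   and strcmp (s, "") reduces to the first byte
   *(const unsigned char *) s. */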
9262
9263 static tree
9264 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
9265 {
9266 const char *p1, *p2;
9267
9268 if (!validate_arg (arg1, POINTER_TYPE)
9269 || !validate_arg (arg2, POINTER_TYPE))
9270 return NULL_TREE;
9271
9272 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9273 if (operand_equal_p (arg1, arg2, 0))
9274 return integer_zero_node;
9275
9276 p1 = c_getstr (arg1);
9277 p2 = c_getstr (arg2);
9278
9279 if (p1 && p2)
9280 {
9281 const int i = strcmp (p1, p2);
9282 if (i < 0)
9283 return integer_minus_one_node;
9284 else if (i > 0)
9285 return integer_one_node;
9286 else
9287 return integer_zero_node;
9288 }
9289
9290 /* If the second arg is "", return *(const unsigned char*)arg1. */
9291 if (p2 && *p2 == '\0')
9292 {
9293 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9294 tree cst_uchar_ptr_node
9295 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9296
9297 return fold_convert_loc (loc, integer_type_node,
9298 build1 (INDIRECT_REF, cst_uchar_node,
9299 fold_convert_loc (loc,
9300 cst_uchar_ptr_node,
9301 arg1)));
9302 }
9303
9304 /* If the first arg is "", return -*(const unsigned char*)arg2. */
9305 if (p1 && *p1 == '\0')
9306 {
9307 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9308 tree cst_uchar_ptr_node
9309 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9310
9311 tree temp
9312 = fold_convert_loc (loc, integer_type_node,
9313 build1 (INDIRECT_REF, cst_uchar_node,
9314 fold_convert_loc (loc,
9315 cst_uchar_ptr_node,
9316 arg2)));
9317 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
9318 }
9319
9320 return NULL_TREE;
9321 }
9322
9323 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
9324 Return NULL_TREE if no simplification can be made. */
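
/* A minimal sketch: strncmp ("abc", "abd", 2) folds to 0 at compile
   time, since only the first two characters take part in the
   comparison. */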
9325
9326 static tree
9327 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
9328 {
9329 const char *p1, *p2;
9330
9331 if (!validate_arg (arg1, POINTER_TYPE)
9332 || !validate_arg (arg2, POINTER_TYPE)
9333 || !validate_arg (len, INTEGER_TYPE))
9334 return NULL_TREE;
9335
9336 /* If the LEN parameter is zero, return zero. */
9337 if (integer_zerop (len))
9338 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
9339 arg1, arg2);
9340
9341 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9342 if (operand_equal_p (arg1, arg2, 0))
9343 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
9344
9345 p1 = c_getstr (arg1);
9346 p2 = c_getstr (arg2);
9347
9348 if (tree_fits_uhwi_p (len) && p1 && p2)
9349 {
9350 const int i = strncmp (p1, p2, tree_to_uhwi (len));
9351 if (i > 0)
9352 return integer_one_node;
9353 else if (i < 0)
9354 return integer_minus_one_node;
9355 else
9356 return integer_zero_node;
9357 }
9358
9359 /* If the second arg is "", and the length is greater than zero,
9360 return *(const unsigned char*)arg1. */
9361 if (p2 && *p2 == '\0'
9362 && TREE_CODE (len) == INTEGER_CST
9363 && tree_int_cst_sgn (len) == 1)
9364 {
9365 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9366 tree cst_uchar_ptr_node
9367 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9368
9369 return fold_convert_loc (loc, integer_type_node,
9370 build1 (INDIRECT_REF, cst_uchar_node,
9371 fold_convert_loc (loc,
9372 cst_uchar_ptr_node,
9373 arg1)));
9374 }
9375
9376 /* If the first arg is "", and the length is greater than zero,
9377 return -*(const unsigned char*)arg2. */
9378 if (p1 && *p1 == '\0'
9379 && TREE_CODE (len) == INTEGER_CST
9380 && tree_int_cst_sgn (len) == 1)
9381 {
9382 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9383 tree cst_uchar_ptr_node
9384 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9385
9386 tree temp = fold_convert_loc (loc, integer_type_node,
9387 build1 (INDIRECT_REF, cst_uchar_node,
9388 fold_convert_loc (loc,
9389 cst_uchar_ptr_node,
9390 arg2)));
9391 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
9392 }
9393
9394 /* If the len parameter is one, return an expression corresponding to
9395 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
9396 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
9397 {
9398 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9399 tree cst_uchar_ptr_node
9400 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9401
9402 tree ind1 = fold_convert_loc (loc, integer_type_node,
9403 build1 (INDIRECT_REF, cst_uchar_node,
9404 fold_convert_loc (loc,
9405 cst_uchar_ptr_node,
9406 arg1)));
9407 tree ind2 = fold_convert_loc (loc, integer_type_node,
9408 build1 (INDIRECT_REF, cst_uchar_node,
9409 fold_convert_loc (loc,
9410 cst_uchar_ptr_node,
9411 arg2)));
9412 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9413 }
9414
9415 return NULL_TREE;
9416 }
9417
9418 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9419 ARG. Return NULL_TREE if no simplification can be made. */
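
/* A minimal sketch: signbit (-1.5) folds to 1 and signbit (2.0)
   to 0; for formats without signed zeros, signbit (x) becomes the
   comparison "x < 0.0". */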
9420
9421 static tree
9422 fold_builtin_signbit (location_t loc, tree arg, tree type)
9423 {
9424 if (!validate_arg (arg, REAL_TYPE))
9425 return NULL_TREE;
9426
9427 /* If ARG is a compile-time constant, determine the result. */
9428 if (TREE_CODE (arg) == REAL_CST
9429 && !TREE_OVERFLOW (arg))
9430 {
9431 REAL_VALUE_TYPE c;
9432
9433 c = TREE_REAL_CST (arg);
9434 return (REAL_VALUE_NEGATIVE (c)
9435 ? build_one_cst (type)
9436 : build_zero_cst (type));
9437 }
9438
9439 /* If ARG is non-negative, the result is always zero. */
9440 if (tree_expr_nonnegative_p (arg))
9441 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9442
9443 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9444 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9445 return fold_convert (type,
9446 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
9447 build_real (TREE_TYPE (arg), dconst0)));
9448
9449 return NULL_TREE;
9450 }
9451
9452 /* Fold function call to builtin copysign, copysignf or copysignl with
9453 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9454 be made. */
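
/* A minimal sketch: copysign (x, 2.0) folds to fabs (x) because the
   second argument is known non-negative, and copysign (-3.0, -0.5)
   folds outright to -3.0. */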
9455
9456 static tree
9457 fold_builtin_copysign (location_t loc, tree fndecl,
9458 tree arg1, tree arg2, tree type)
9459 {
9460 tree tem;
9461
9462 if (!validate_arg (arg1, REAL_TYPE)
9463 || !validate_arg (arg2, REAL_TYPE))
9464 return NULL_TREE;
9465
9466 /* copysign(X,X) is X. */
9467 if (operand_equal_p (arg1, arg2, 0))
9468 return fold_convert_loc (loc, type, arg1);
9469
9470 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9471 if (TREE_CODE (arg1) == REAL_CST
9472 && TREE_CODE (arg2) == REAL_CST
9473 && !TREE_OVERFLOW (arg1)
9474 && !TREE_OVERFLOW (arg2))
9475 {
9476 REAL_VALUE_TYPE c1, c2;
9477
9478 c1 = TREE_REAL_CST (arg1);
9479 c2 = TREE_REAL_CST (arg2);
9480 /* c1.sign := c2.sign. */
9481 real_copysign (&c1, &c2);
9482 return build_real (type, c1);
9483 }
9484
9485 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9486 Remember to evaluate Y for side-effects. */
9487 if (tree_expr_nonnegative_p (arg2))
9488 return omit_one_operand_loc (loc, type,
9489 fold_build1_loc (loc, ABS_EXPR, type, arg1),
9490 arg2);
9491
9492 /* Strip sign changing operations for the first argument. */
9493 tem = fold_strip_sign_ops (arg1);
9494 if (tem)
9495 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
9496
9497 return NULL_TREE;
9498 }
9499
9500 /* Fold a call to builtin isascii with argument ARG. */
9501
9502 static tree
9503 fold_builtin_isascii (location_t loc, tree arg)
9504 {
9505 if (!validate_arg (arg, INTEGER_TYPE))
9506 return NULL_TREE;
9507 else
9508 {
9509 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9510 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9511 build_int_cst (integer_type_node,
9512 ~ (unsigned HOST_WIDE_INT) 0x7f));
9513 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9514 arg, integer_zero_node);
9515 }
9516 }
9517
9518 /* Fold a call to builtin toascii with argument ARG. */
9519
9520 static tree
9521 fold_builtin_toascii (location_t loc, tree arg)
9522 {
9523 if (!validate_arg (arg, INTEGER_TYPE))
9524 return NULL_TREE;
9525
9526 /* Transform toascii(c) -> (c & 0x7f). */
9527 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9528 build_int_cst (integer_type_node, 0x7f));
9529 }
9530
9531 /* Fold a call to builtin isdigit with argument ARG. */
9532
9533 static tree
9534 fold_builtin_isdigit (location_t loc, tree arg)
9535 {
9536 if (!validate_arg (arg, INTEGER_TYPE))
9537 return NULL_TREE;
9538 else
9539 {
9540 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9541 /* According to the C standard, isdigit is unaffected by locale.
9542 However, it definitely is affected by the target character set. */
9543 unsigned HOST_WIDE_INT target_digit0
9544 = lang_hooks.to_target_charset ('0');
9545
9546 if (target_digit0 == 0)
9547 return NULL_TREE;
9548
9549 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9550 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9551 build_int_cst (unsigned_type_node, target_digit0));
9552 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9553 build_int_cst (unsigned_type_node, 9));
9554 }
9555 }
9556
9557 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9558
9559 static tree
9560 fold_builtin_fabs (location_t loc, tree arg, tree type)
9561 {
9562 if (!validate_arg (arg, REAL_TYPE))
9563 return NULL_TREE;
9564
9565 arg = fold_convert_loc (loc, type, arg);
9566 if (TREE_CODE (arg) == REAL_CST)
9567 return fold_abs_const (arg, type);
9568 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9569 }
9570
9571 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9572
9573 static tree
9574 fold_builtin_abs (location_t loc, tree arg, tree type)
9575 {
9576 if (!validate_arg (arg, INTEGER_TYPE))
9577 return NULL_TREE;
9578
9579 arg = fold_convert_loc (loc, type, arg);
9580 if (TREE_CODE (arg) == INTEGER_CST)
9581 return fold_abs_const (arg, type);
9582 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9583 }
9584
9585 /* Fold a fma operation with arguments ARG[012]. */
9586
9587 tree
9588 fold_fma (location_t loc ATTRIBUTE_UNUSED,
9589 tree type, tree arg0, tree arg1, tree arg2)
9590 {
9591 if (TREE_CODE (arg0) == REAL_CST
9592 && TREE_CODE (arg1) == REAL_CST
9593 && TREE_CODE (arg2) == REAL_CST)
9594 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
9595
9596 return NULL_TREE;
9597 }
9598
9599 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
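
/* A minimal sketch: fma (2.0, 3.0, 1.0) folds to 7.0 through MPFR;
   non-constant operands become FMA_EXPR below only when the target
   implements the fma optab. */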
9600
9601 static tree
9602 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
9603 {
9604 if (validate_arg (arg0, REAL_TYPE)
9605 && validate_arg (arg1, REAL_TYPE)
9606 && validate_arg (arg2, REAL_TYPE))
9607 {
9608 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
9609 if (tem)
9610 return tem;
9611
9612 /* ??? Only expand to FMA_EXPR if it's directly supported. */
9613 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
9614 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
9615 }
9616 return NULL_TREE;
9617 }
9618
9619 /* Fold a call to builtin fmin or fmax. */
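
/* A minimal sketch: fmax (x, __builtin_nan ("")) folds to x (the
   quiet NaN operand is dropped), and with -ffinite-math-only
   fmax (x, y) becomes MAX_EXPR <x, y>. */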
9620
9621 static tree
9622 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
9623 tree type, bool max)
9624 {
9625 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9626 {
9627 /* Calculate the result when the argument is a constant. */
9628 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9629
9630 if (res)
9631 return res;
9632
9633 /* If either argument is NaN, return the other one. Avoid the
9634 transformation if we get (and honor) a signalling NaN. Using
9635 omit_one_operand() ensures we create a non-lvalue. */
9636 if (TREE_CODE (arg0) == REAL_CST
9637 && real_isnan (&TREE_REAL_CST (arg0))
9638 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9639 || ! TREE_REAL_CST (arg0).signalling))
9640 return omit_one_operand_loc (loc, type, arg1, arg0);
9641 if (TREE_CODE (arg1) == REAL_CST
9642 && real_isnan (&TREE_REAL_CST (arg1))
9643 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9644 || ! TREE_REAL_CST (arg1).signalling))
9645 return omit_one_operand_loc (loc, type, arg0, arg1);
9646
9647 /* Transform fmin/fmax(x,x) -> x. */
9648 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9649 return omit_one_operand_loc (loc, type, arg0, arg1);
9650
9651 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9652 functions to return the numeric arg if the other one is NaN.
9653 These tree codes don't honor that, so only transform if
9654 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9655 handled, so we don't have to worry about it either. */
9656 if (flag_finite_math_only)
9657 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9658 fold_convert_loc (loc, type, arg0),
9659 fold_convert_loc (loc, type, arg1));
9660 }
9661 return NULL_TREE;
9662 }
9663
9664 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9665
9666 static tree
9667 fold_builtin_carg (location_t loc, tree arg, tree type)
9668 {
9669 if (validate_arg (arg, COMPLEX_TYPE)
9670 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9671 {
9672 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9673
9674 if (atan2_fn)
9675 {
9676 tree new_arg = builtin_save_expr (arg);
9677 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9678 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9679 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9680 }
9681 }
9682
9683 return NULL_TREE;
9684 }
9685
9686 /* Fold a call to builtin logb/ilogb. */
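
/* A minimal sketch, assuming radix 2: logb (8.0) folds to 3.0 and
   ilogb (8.0) to 3, while logb (-Inf) folds to +Inf. */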
9687
9688 static tree
9689 fold_builtin_logb (location_t loc, tree arg, tree rettype)
9690 {
9691 if (! validate_arg (arg, REAL_TYPE))
9692 return NULL_TREE;
9693
9694 STRIP_NOPS (arg);
9695
9696 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9697 {
9698 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9699
9700 switch (value->cl)
9701 {
9702 case rvc_nan:
9703 case rvc_inf:
9704 /* If arg is Inf or NaN and we're logb, return it. */
9705 if (TREE_CODE (rettype) == REAL_TYPE)
9706 {
9707 /* For logb(-Inf) we have to return +Inf. */
9708 if (real_isinf (value) && real_isneg (value))
9709 {
9710 REAL_VALUE_TYPE tem;
9711 real_inf (&tem);
9712 return build_real (rettype, tem);
9713 }
9714 return fold_convert_loc (loc, rettype, arg);
9715 }
9716 /* Fall through... */
9717 case rvc_zero:
9718 /* Zero may set errno and/or raise an exception for logb, also
9719 for ilogb we don't know FP_ILOGB0. */
9720 return NULL_TREE;
9721 case rvc_normal:
9722 /* For normal numbers, proceed iff radix == 2. In GCC,
9723 normalized significands are in the range [0.5, 1.0). We
9724 want the exponent as if they were [1.0, 2.0) so get the
9725 exponent and subtract 1. */
9726 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9727 return fold_convert_loc (loc, rettype,
9728 build_int_cst (integer_type_node,
9729 REAL_EXP (value)-1));
9730 break;
9731 }
9732 }
9733
9734 return NULL_TREE;
9735 }
9736
9737 /* Fold a call to builtin significand, if radix == 2. */
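
/* A minimal sketch, assuming radix 2: significand (8.0) folds to
   1.0, since 8.0 = 1.0 * 2**3 with the significand scaled into
   [1.0, 2.0). */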
9738
9739 static tree
9740 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9741 {
9742 if (! validate_arg (arg, REAL_TYPE))
9743 return NULL_TREE;
9744
9745 STRIP_NOPS (arg);
9746
9747 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9748 {
9749 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9750
9751 switch (value->cl)
9752 {
9753 case rvc_zero:
9754 case rvc_nan:
9755 case rvc_inf:
9756 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9757 return fold_convert_loc (loc, rettype, arg);
9758 case rvc_normal:
9759 /* For normal numbers, proceed iff radix == 2. */
9760 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9761 {
9762 REAL_VALUE_TYPE result = *value;
9763 /* In GCC, normalized significands are in the range [0.5,
9764 1.0). We want them to be [1.0, 2.0) so set the
9765 exponent to 1. */
9766 SET_REAL_EXP (&result, 1);
9767 return build_real (rettype, result);
9768 }
9769 break;
9770 }
9771 }
9772
9773 return NULL_TREE;
9774 }
9775
9776 /* Fold a call to builtin frexp; we can assume the base is 2. */
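
/* A minimal sketch: frexp (8.0, &e) folds to the pair
   (*e = 4, 0.5), since 8.0 = 0.5 * 2**4 with the fraction in
   [0.5, 1.0). */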
9777
9778 static tree
9779 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9780 {
9781 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9782 return NULL_TREE;
9783
9784 STRIP_NOPS (arg0);
9785
9786 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9787 return NULL_TREE;
9788
9789 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9790
9791 /* Proceed if a valid pointer type was passed in. */
9792 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9793 {
9794 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9795 tree frac, exp;
9796
9797 switch (value->cl)
9798 {
9799 case rvc_zero:
9800 /* For +-0, return (*exp = 0, +-0). */
9801 exp = integer_zero_node;
9802 frac = arg0;
9803 break;
9804 case rvc_nan:
9805 case rvc_inf:
9806 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9807 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9808 case rvc_normal:
9809 {
9810 /* Since the frexp function always expects base 2, and in
9811 GCC normalized significands are already in the range
9812 [0.5, 1.0), we have exactly what frexp wants. */
9813 REAL_VALUE_TYPE frac_rvt = *value;
9814 SET_REAL_EXP (&frac_rvt, 0);
9815 frac = build_real (rettype, frac_rvt);
9816 exp = build_int_cst (integer_type_node, REAL_EXP (value));
9817 }
9818 break;
9819 default:
9820 gcc_unreachable ();
9821 }
9822
9823 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9824 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9825 TREE_SIDE_EFFECTS (arg1) = 1;
9826 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9827 }
9828
9829 return NULL_TREE;
9830 }
9831
9832 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9833 then we can assume the base is two. If it's false, then we have to
9834 check the mode of the TYPE parameter in certain cases. */
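
/* A minimal sketch: ldexp (1.5, 3) folds to 12.0, and ldexp (x, 0)
   folds to x without touching the constant path at all. */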
9835
9836 static tree
9837 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9838 tree type, bool ldexp)
9839 {
9840 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9841 {
9842 STRIP_NOPS (arg0);
9843 STRIP_NOPS (arg1);
9844
9845 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9846 if (real_zerop (arg0) || integer_zerop (arg1)
9847 || (TREE_CODE (arg0) == REAL_CST
9848 && !real_isfinite (&TREE_REAL_CST (arg0))))
9849 return omit_one_operand_loc (loc, type, arg0, arg1);
9850
9851 /* If both arguments are constant, then try to evaluate it. */
9852 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9853 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9854 && tree_fits_shwi_p (arg1))
9855 {
9856 /* Bound the maximum adjustment to twice the range of the
9857 mode's valid exponents. Use abs to ensure the range is
9858 positive as a sanity check. */
9859 const long max_exp_adj = 2 *
9860 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9861 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9862
9863 /* Get the user-requested adjustment. */
9864 const HOST_WIDE_INT req_exp_adj = tree_to_shwi (arg1);
9865
9866 /* The requested adjustment must be inside this range. This
9867 is a preliminary cap to avoid things like overflow, we
9868 may still fail to compute the result for other reasons. */
9869 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9870 {
9871 REAL_VALUE_TYPE initial_result;
9872
9873 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9874
9875 /* Ensure we didn't overflow. */
9876 if (! real_isinf (&initial_result))
9877 {
9878 const REAL_VALUE_TYPE trunc_result
9879 = real_value_truncate (TYPE_MODE (type), initial_result);
9880
9881 /* Only proceed if the target mode can hold the
9882 resulting value. */
9883 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9884 return build_real (type, trunc_result);
9885 }
9886 }
9887 }
9888 }
9889
9890 return NULL_TREE;
9891 }
9892
9893 /* Fold a call to builtin modf. */
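
/* A minimal sketch: modf (2.5, &ip) folds to the pair
   (*ip = 2.0, 0.5), and modf (-2.0, &ip) yields a -0.0 fractional
   part. */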
9894
9895 static tree
9896 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9897 {
9898 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9899 return NULL_TREE;
9900
9901 STRIP_NOPS (arg0);
9902
9903 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9904 return NULL_TREE;
9905
9906 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9907
9908 /* Proceed if a valid pointer type was passed in. */
9909 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9910 {
9911 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9912 REAL_VALUE_TYPE trunc, frac;
9913
9914 switch (value->cl)
9915 {
9916 case rvc_nan:
9917 case rvc_zero:
9918 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9919 trunc = frac = *value;
9920 break;
9921 case rvc_inf:
9922 /* For +-Inf, return (*arg1 = arg0, +-0). */
9923 frac = dconst0;
9924 frac.sign = value->sign;
9925 trunc = *value;
9926 break;
9927 case rvc_normal:
9928 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9929 real_trunc (&trunc, VOIDmode, value);
9930 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9931 /* If the original number was negative and already
9932 integral, then the fractional part is -0.0. */
9933 if (value->sign && frac.cl == rvc_zero)
9934 frac.sign = value->sign;
9935 break;
9936 }
9937
9938 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9939 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9940 build_real (rettype, trunc));
9941 TREE_SIDE_EFFECTS (arg1) = 1;
9942 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9943 build_real (rettype, frac));
9944 }
9945
9946 return NULL_TREE;
9947 }
9948
9949 /* Given a location LOC, an interclass builtin function decl FNDECL
9950 and its single argument ARG, return a folded expression computing
9951 the same, or NULL_TREE if we either couldn't or didn't want to fold
9952 (the latter happens if there's an RTL instruction available). */
9953
9954 static tree
9955 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9956 {
9957 enum machine_mode mode;
9958
9959 if (!validate_arg (arg, REAL_TYPE))
9960 return NULL_TREE;
9961
9962 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9963 return NULL_TREE;
9964
9965 mode = TYPE_MODE (TREE_TYPE (arg));
9966
9967 /* If there is no optab, try generic code. */
9968 switch (DECL_FUNCTION_CODE (fndecl))
9969 {
9970 tree result;
9971
9972 CASE_FLT_FN (BUILT_IN_ISINF):
9973 {
9974 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9975 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9976 tree const type = TREE_TYPE (arg);
9977 REAL_VALUE_TYPE r;
9978 char buf[128];
9979
9980 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9981 real_from_string (&r, buf);
9982 result = build_call_expr (isgr_fn, 2,
9983 fold_build1_loc (loc, ABS_EXPR, type, arg),
9984 build_real (type, r));
9985 return result;
9986 }
9987 CASE_FLT_FN (BUILT_IN_FINITE):
9988 case BUILT_IN_ISFINITE:
9989 {
9990 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9991 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9992 tree const type = TREE_TYPE (arg);
9993 REAL_VALUE_TYPE r;
9994 char buf[128];
9995
9996 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9997 real_from_string (&r, buf);
9998 result = build_call_expr (isle_fn, 2,
9999 fold_build1_loc (loc, ABS_EXPR, type, arg),
10000 build_real (type, r));
10001 /*result = fold_build2_loc (loc, UNGT_EXPR,
10002 TREE_TYPE (TREE_TYPE (fndecl)),
10003 fold_build1_loc (loc, ABS_EXPR, type, arg),
10004 build_real (type, r));
10005 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
10006 TREE_TYPE (TREE_TYPE (fndecl)),
10007 result);*/
10008 return result;
10009 }
10010 case BUILT_IN_ISNORMAL:
10011 {
10012 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
10013 islessequal(fabs(x),DBL_MAX). */
10014 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
10015 tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
10016 tree const type = TREE_TYPE (arg);
10017 REAL_VALUE_TYPE rmax, rmin;
10018 char buf[128];
10019
10020 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
10021 real_from_string (&rmax, buf);
10022 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
10023 real_from_string (&rmin, buf);
10024 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
10025 result = build_call_expr (isle_fn, 2, arg,
10026 build_real (type, rmax));
10027 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
10028 build_call_expr (isge_fn, 2, arg,
10029 build_real (type, rmin)));
10030 return result;
10031 }
10032 default:
10033 break;
10034 }
10035
10036 return NULL_TREE;
10037 }
10038
10039 /* Fold a call to __builtin_isnan (), __builtin_isinf () or
10040 __builtin_finite (). ARG is the argument for the call. */
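
/* A minimal sketch: when NaNs are honored, isnan (x) is lowered to
   the self-comparison "x UNORDERED x", which holds exactly for NaN;
   with -ffinite-math-only it folds to 0. */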
10041
10042 static tree
10043 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
10044 {
10045 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10046 REAL_VALUE_TYPE r;
10047
10048 if (!validate_arg (arg, REAL_TYPE))
10049 return NULL_TREE;
10050
10051 switch (builtin_index)
10052 {
10053 case BUILT_IN_ISINF:
10054 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
10055 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
10056
10057 if (TREE_CODE (arg) == REAL_CST)
10058 {
10059 r = TREE_REAL_CST (arg);
10060 if (real_isinf (&r))
10061 return real_compare (GT_EXPR, &r, &dconst0)
10062 ? integer_one_node : integer_minus_one_node;
10063 else
10064 return integer_zero_node;
10065 }
10066
10067 return NULL_TREE;
10068
10069 case BUILT_IN_ISINF_SIGN:
10070 {
10071 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
10072 /* In a boolean context, GCC will fold the inner COND_EXPR to
10073 1. So e.g. "if (isinf_sign(x))" would be folded to just
10074 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
10075 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
10076 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
10077 tree tmp = NULL_TREE;
10078
10079 arg = builtin_save_expr (arg);
10080
10081 if (signbit_fn && isinf_fn)
10082 {
10083 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
10084 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
10085
10086 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
10087 signbit_call, integer_zero_node);
10088 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
10089 isinf_call, integer_zero_node);
10090
10091 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
10092 integer_minus_one_node, integer_one_node);
10093 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
10094 isinf_call, tmp,
10095 integer_zero_node);
10096 }
10097
10098 return tmp;
10099 }
10100
10101 case BUILT_IN_ISFINITE:
10102 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
10103 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
10104 return omit_one_operand_loc (loc, type, integer_one_node, arg);
10105
10106 if (TREE_CODE (arg) == REAL_CST)
10107 {
10108 r = TREE_REAL_CST (arg);
10109 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
10110 }
10111
10112 return NULL_TREE;
10113
10114 case BUILT_IN_ISNAN:
10115 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
10116 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
10117
10118 if (TREE_CODE (arg) == REAL_CST)
10119 {
10120 r = TREE_REAL_CST (arg);
10121 return real_isnan (&r) ? integer_one_node : integer_zero_node;
10122 }
10123
10124 arg = builtin_save_expr (arg);
10125 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
10126
10127 default:
10128 gcc_unreachable ();
10129 }
10130 }
10131
10132 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
10133 This builtin will generate code to return the appropriate floating
10134 point classification depending on the value of the floating point
10135 number passed in. The possible return values must be supplied as
10136 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
10137 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
10138 one floating point argument, which is "type generic". */
10139
10140 static tree
10141 fold_builtin_fpclassify (location_t loc, tree exp)
10142 {
10143 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
10144 arg, type, res, tmp;
10145 enum machine_mode mode;
10146 REAL_VALUE_TYPE r;
10147 char buf[128];
10148
10149 /* Verify the required arguments in the original call. */
10150 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
10151 INTEGER_TYPE, INTEGER_TYPE,
10152 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
10153 return NULL_TREE;
10154
10155 fp_nan = CALL_EXPR_ARG (exp, 0);
10156 fp_infinite = CALL_EXPR_ARG (exp, 1);
10157 fp_normal = CALL_EXPR_ARG (exp, 2);
10158 fp_subnormal = CALL_EXPR_ARG (exp, 3);
10159 fp_zero = CALL_EXPR_ARG (exp, 4);
10160 arg = CALL_EXPR_ARG (exp, 5);
10161 type = TREE_TYPE (arg);
10162 mode = TYPE_MODE (type);
10163 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
10164
10165 /* fpclassify(x) ->
10166 isnan(x) ? FP_NAN :
10167 (fabs(x) == Inf ? FP_INFINITE :
10168 (fabs(x) >= DBL_MIN ? FP_NORMAL :
10169 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
10170
10171 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
10172 build_real (type, dconst0));
10173 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
10174 tmp, fp_zero, fp_subnormal);
10175
10176 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
10177 real_from_string (&r, buf);
10178 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
10179 arg, build_real (type, r));
10180 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
10181
10182 if (HONOR_INFINITIES (mode))
10183 {
10184 real_inf (&r);
10185 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
10186 build_real (type, r));
10187 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
10188 fp_infinite, res);
10189 }
10190
10191 if (HONOR_NANS (mode))
10192 {
10193 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
10194 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
10195 }
10196
10197 return res;
10198 }
10199
10200 /* Fold a call to an unordered comparison function such as
10201 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
10202 being called and ARG0 and ARG1 are the arguments for the call.
10203 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
10204 the opposite of the desired result. UNORDERED_CODE is used
10205 for modes that can hold NaNs and ORDERED_CODE is used for
10206 the rest. */
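
/* A minimal sketch: __builtin_isgreater (x, y) is folded here to
   !(x UNLE y) when the mode honors NaNs and to !(x <= y) otherwise,
   so no invalid-operand exception is raised for unordered operands. */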
10207
10208 static tree
10209 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
10210 enum tree_code unordered_code,
10211 enum tree_code ordered_code)
10212 {
10213 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10214 enum tree_code code;
10215 tree type0, type1;
10216 enum tree_code code0, code1;
10217 tree cmp_type = NULL_TREE;
10218
10219 type0 = TREE_TYPE (arg0);
10220 type1 = TREE_TYPE (arg1);
10221
10222 code0 = TREE_CODE (type0);
10223 code1 = TREE_CODE (type1);
10224
10225 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
10226 /* Choose the wider of two real types. */
10227 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
10228 ? type0 : type1;
10229 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
10230 cmp_type = type0;
10231 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
10232 cmp_type = type1;
10233
10234 arg0 = fold_convert_loc (loc, cmp_type, arg0);
10235 arg1 = fold_convert_loc (loc, cmp_type, arg1);
10236
10237 if (unordered_code == UNORDERED_EXPR)
10238 {
10239 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
10240 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
10241 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
10242 }
10243
10244 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
10245 : ordered_code;
10246 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
10247 fold_build2_loc (loc, code, type, arg0, arg1));
10248 }
10249
10250 /* Fold a call to built-in function FNDECL with 0 arguments.
10251 IGNORE is true if the result of the function call is ignored. This
10252 function returns NULL_TREE if no simplification was possible. */
10253
10254 static tree
10255 fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
10256 {
10257 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10258 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10259 switch (fcode)
10260 {
10261 CASE_FLT_FN (BUILT_IN_INF):
10262 case BUILT_IN_INFD32:
10263 case BUILT_IN_INFD64:
10264 case BUILT_IN_INFD128:
10265 return fold_builtin_inf (loc, type, true);
10266
10267 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
10268 return fold_builtin_inf (loc, type, false);
10269
10270 case BUILT_IN_CLASSIFY_TYPE:
10271 return fold_builtin_classify_type (NULL_TREE);
10272
10273 case BUILT_IN_UNREACHABLE:
10274 if (flag_sanitize & SANITIZE_UNREACHABLE
10275 && (current_function_decl == NULL
10276 || !lookup_attribute ("no_sanitize_undefined",
10277 DECL_ATTRIBUTES (current_function_decl))))
10278 return ubsan_instrument_unreachable (loc);
10279 break;
10280
10281 default:
10282 break;
10283 }
10284 return NULL_TREE;
10285 }
10286
10287 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
10288 IGNORE is true if the result of the function call is ignored. This
10289 function returns NULL_TREE if no simplification was possible. */
10290
10291 static tree
10292 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
10293 {
10294 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10295 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10296 switch (fcode)
10297 {
10298 case BUILT_IN_CONSTANT_P:
10299 {
10300 tree val = fold_builtin_constant_p (arg0);
10301
10302 /* Gimplification will pull the CALL_EXPR for the builtin out of
10303 an if condition. When not optimizing, we'll not CSE it back.
10304 To avoid regressions such as link errors, return false now. */
10305 if (!val && !optimize)
10306 val = integer_zero_node;
10307
10308 return val;
10309 }
10310
10311 case BUILT_IN_CLASSIFY_TYPE:
10312 return fold_builtin_classify_type (arg0);
10313
10314 case BUILT_IN_STRLEN:
10315 return fold_builtin_strlen (loc, type, arg0);
10316
10317 CASE_FLT_FN (BUILT_IN_FABS):
10318 case BUILT_IN_FABSD32:
10319 case BUILT_IN_FABSD64:
10320 case BUILT_IN_FABSD128:
10321 return fold_builtin_fabs (loc, arg0, type);
10322
10323 case BUILT_IN_ABS:
10324 case BUILT_IN_LABS:
10325 case BUILT_IN_LLABS:
10326 case BUILT_IN_IMAXABS:
10327 return fold_builtin_abs (loc, arg0, type);
10328
10329 CASE_FLT_FN (BUILT_IN_CONJ):
10330 if (validate_arg (arg0, COMPLEX_TYPE)
10331 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10332 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
10333 break;
10334
10335 CASE_FLT_FN (BUILT_IN_CREAL):
10336 if (validate_arg (arg0, COMPLEX_TYPE)
10337 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10338 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
10339 break;
10340
10341 CASE_FLT_FN (BUILT_IN_CIMAG):
10342 if (validate_arg (arg0, COMPLEX_TYPE)
10343 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10344 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
10345 break;
10346
10347 CASE_FLT_FN (BUILT_IN_CCOS):
10348 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ false);
10349
10350 CASE_FLT_FN (BUILT_IN_CCOSH):
10351 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ true);
10352
10353 CASE_FLT_FN (BUILT_IN_CPROJ):
10354 return fold_builtin_cproj (loc, arg0, type);
10355
10356 CASE_FLT_FN (BUILT_IN_CSIN):
10357 if (validate_arg (arg0, COMPLEX_TYPE)
10358 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10359 return do_mpc_arg1 (arg0, type, mpc_sin);
10360 break;
10361
10362 CASE_FLT_FN (BUILT_IN_CSINH):
10363 if (validate_arg (arg0, COMPLEX_TYPE)
10364 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10365 return do_mpc_arg1 (arg0, type, mpc_sinh);
10366 break;
10367
10368 CASE_FLT_FN (BUILT_IN_CTAN):
10369 if (validate_arg (arg0, COMPLEX_TYPE)
10370 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10371 return do_mpc_arg1 (arg0, type, mpc_tan);
10372 break;
10373
10374 CASE_FLT_FN (BUILT_IN_CTANH):
10375 if (validate_arg (arg0, COMPLEX_TYPE)
10376 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10377 return do_mpc_arg1 (arg0, type, mpc_tanh);
10378 break;
10379
10380 CASE_FLT_FN (BUILT_IN_CLOG):
10381 if (validate_arg (arg0, COMPLEX_TYPE)
10382 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10383 return do_mpc_arg1 (arg0, type, mpc_log);
10384 break;
10385
10386 CASE_FLT_FN (BUILT_IN_CSQRT):
10387 if (validate_arg (arg0, COMPLEX_TYPE)
10388 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10389 return do_mpc_arg1 (arg0, type, mpc_sqrt);
10390 break;
10391
10392 CASE_FLT_FN (BUILT_IN_CASIN):
10393 if (validate_arg (arg0, COMPLEX_TYPE)
10394 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10395 return do_mpc_arg1 (arg0, type, mpc_asin);
10396 break;
10397
10398 CASE_FLT_FN (BUILT_IN_CACOS):
10399 if (validate_arg (arg0, COMPLEX_TYPE)
10400 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10401 return do_mpc_arg1 (arg0, type, mpc_acos);
10402 break;
10403
10404 CASE_FLT_FN (BUILT_IN_CATAN):
10405 if (validate_arg (arg0, COMPLEX_TYPE)
10406 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10407 return do_mpc_arg1 (arg0, type, mpc_atan);
10408 break;
10409
10410 CASE_FLT_FN (BUILT_IN_CASINH):
10411 if (validate_arg (arg0, COMPLEX_TYPE)
10412 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10413 return do_mpc_arg1 (arg0, type, mpc_asinh);
10414 break;
10415
10416 CASE_FLT_FN (BUILT_IN_CACOSH):
10417 if (validate_arg (arg0, COMPLEX_TYPE)
10418 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10419 return do_mpc_arg1 (arg0, type, mpc_acosh);
10420 break;
10421
10422 CASE_FLT_FN (BUILT_IN_CATANH):
10423 if (validate_arg (arg0, COMPLEX_TYPE)
10424 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10425 return do_mpc_arg1 (arg0, type, mpc_atanh);
10426 break;
10427
10428 CASE_FLT_FN (BUILT_IN_CABS):
10429 return fold_builtin_cabs (loc, arg0, type, fndecl);
10430
10431 CASE_FLT_FN (BUILT_IN_CARG):
10432 return fold_builtin_carg (loc, arg0, type);
10433
10434 CASE_FLT_FN (BUILT_IN_SQRT):
10435 return fold_builtin_sqrt (loc, arg0, type);
10436
10437 CASE_FLT_FN (BUILT_IN_CBRT):
10438 return fold_builtin_cbrt (loc, arg0, type);
10439
10440 CASE_FLT_FN (BUILT_IN_ASIN):
10441 if (validate_arg (arg0, REAL_TYPE))
10442 return do_mpfr_arg1 (arg0, type, mpfr_asin,
10443 &dconstm1, &dconst1, true);
10444 break;
10445
10446 CASE_FLT_FN (BUILT_IN_ACOS):
10447 if (validate_arg (arg0, REAL_TYPE))
10448 return do_mpfr_arg1 (arg0, type, mpfr_acos,
10449 &dconstm1, &dconst1, true);
10450 break;
10451
10452 CASE_FLT_FN (BUILT_IN_ATAN):
10453 if (validate_arg (arg0, REAL_TYPE))
10454 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
10455 break;
10456
10457 CASE_FLT_FN (BUILT_IN_ASINH):
10458 if (validate_arg (arg0, REAL_TYPE))
10459 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10460 break;
10461
10462 CASE_FLT_FN (BUILT_IN_ACOSH):
10463 if (validate_arg (arg0, REAL_TYPE))
10464 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10465 &dconst1, NULL, true);
10466 break;
10467
10468 CASE_FLT_FN (BUILT_IN_ATANH):
10469 if (validate_arg (arg0, REAL_TYPE))
10470 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10471 &dconstm1, &dconst1, false);
10472 break;
10473
10474 CASE_FLT_FN (BUILT_IN_SIN):
10475 if (validate_arg (arg0, REAL_TYPE))
10476 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10477 break;
10478
10479 CASE_FLT_FN (BUILT_IN_COS):
10480 return fold_builtin_cos (loc, arg0, type, fndecl);
10481
10482 CASE_FLT_FN (BUILT_IN_TAN):
10483 return fold_builtin_tan (arg0, type);
10484
10485 CASE_FLT_FN (BUILT_IN_CEXP):
10486 return fold_builtin_cexp (loc, arg0, type);
10487
10488 CASE_FLT_FN (BUILT_IN_CEXPI):
10489 if (validate_arg (arg0, REAL_TYPE))
10490 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10491 break;
10492
10493 CASE_FLT_FN (BUILT_IN_SINH):
10494 if (validate_arg (arg0, REAL_TYPE))
10495 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10496 break;
10497
10498 CASE_FLT_FN (BUILT_IN_COSH):
10499 return fold_builtin_cosh (loc, arg0, type, fndecl);
10500
10501 CASE_FLT_FN (BUILT_IN_TANH):
10502 if (validate_arg (arg0, REAL_TYPE))
10503 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10504 break;
10505
10506 CASE_FLT_FN (BUILT_IN_ERF):
10507 if (validate_arg (arg0, REAL_TYPE))
10508 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10509 break;
10510
10511 CASE_FLT_FN (BUILT_IN_ERFC):
10512 if (validate_arg (arg0, REAL_TYPE))
10513 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10514 break;
10515
10516 CASE_FLT_FN (BUILT_IN_TGAMMA):
10517 if (validate_arg (arg0, REAL_TYPE))
10518 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10519 break;
10520
10521 CASE_FLT_FN (BUILT_IN_EXP):
10522 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
10523
10524 CASE_FLT_FN (BUILT_IN_EXP2):
10525 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
10526
10527 CASE_FLT_FN (BUILT_IN_EXP10):
10528 CASE_FLT_FN (BUILT_IN_POW10):
10529 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
10530
10531 CASE_FLT_FN (BUILT_IN_EXPM1):
10532 if (validate_arg (arg0, REAL_TYPE))
10533 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10534 break;
10535
10536 CASE_FLT_FN (BUILT_IN_LOG):
10537 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
10538
10539 CASE_FLT_FN (BUILT_IN_LOG2):
10540 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
10541
10542 CASE_FLT_FN (BUILT_IN_LOG10):
10543 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
10544
10545 CASE_FLT_FN (BUILT_IN_LOG1P):
10546 if (validate_arg (arg0, REAL_TYPE))
10547 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10548 &dconstm1, NULL, false);
10549 break;
10550
10551 CASE_FLT_FN (BUILT_IN_J0):
10552 if (validate_arg (arg0, REAL_TYPE))
10553 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10554 NULL, NULL, 0);
10555 break;
10556
10557 CASE_FLT_FN (BUILT_IN_J1):
10558 if (validate_arg (arg0, REAL_TYPE))
10559 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10560 NULL, NULL, 0);
10561 break;
10562
10563 CASE_FLT_FN (BUILT_IN_Y0):
10564 if (validate_arg (arg0, REAL_TYPE))
10565 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10566 &dconst0, NULL, false);
10567 break;
10568
10569 CASE_FLT_FN (BUILT_IN_Y1):
10570 if (validate_arg (arg0, REAL_TYPE))
10571 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10572 &dconst0, NULL, false);
10573 break;
10574
10575 CASE_FLT_FN (BUILT_IN_NAN):
10576 case BUILT_IN_NAND32:
10577 case BUILT_IN_NAND64:
10578 case BUILT_IN_NAND128:
10579 return fold_builtin_nan (arg0, type, true);
10580
10581 CASE_FLT_FN (BUILT_IN_NANS):
10582 return fold_builtin_nan (arg0, type, false);
10583
10584 CASE_FLT_FN (BUILT_IN_FLOOR):
10585 return fold_builtin_floor (loc, fndecl, arg0);
10586
10587 CASE_FLT_FN (BUILT_IN_CEIL):
10588 return fold_builtin_ceil (loc, fndecl, arg0);
10589
10590 CASE_FLT_FN (BUILT_IN_TRUNC):
10591 return fold_builtin_trunc (loc, fndecl, arg0);
10592
10593 CASE_FLT_FN (BUILT_IN_ROUND):
10594 return fold_builtin_round (loc, fndecl, arg0);
10595
10596 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10597 CASE_FLT_FN (BUILT_IN_RINT):
10598 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
10599
10600 CASE_FLT_FN (BUILT_IN_ICEIL):
10601 CASE_FLT_FN (BUILT_IN_LCEIL):
10602 CASE_FLT_FN (BUILT_IN_LLCEIL):
10603 CASE_FLT_FN (BUILT_IN_LFLOOR):
10604 CASE_FLT_FN (BUILT_IN_IFLOOR):
10605 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10606 CASE_FLT_FN (BUILT_IN_IROUND):
10607 CASE_FLT_FN (BUILT_IN_LROUND):
10608 CASE_FLT_FN (BUILT_IN_LLROUND):
10609 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
10610
10611 CASE_FLT_FN (BUILT_IN_IRINT):
10612 CASE_FLT_FN (BUILT_IN_LRINT):
10613 CASE_FLT_FN (BUILT_IN_LLRINT):
10614 return fold_fixed_mathfn (loc, fndecl, arg0);
10615
10616 case BUILT_IN_BSWAP16:
10617 case BUILT_IN_BSWAP32:
10618 case BUILT_IN_BSWAP64:
10619 return fold_builtin_bswap (fndecl, arg0);
10620
10621 CASE_INT_FN (BUILT_IN_FFS):
10622 CASE_INT_FN (BUILT_IN_CLZ):
10623 CASE_INT_FN (BUILT_IN_CTZ):
10624 CASE_INT_FN (BUILT_IN_CLRSB):
10625 CASE_INT_FN (BUILT_IN_POPCOUNT):
10626 CASE_INT_FN (BUILT_IN_PARITY):
10627 return fold_builtin_bitop (fndecl, arg0);
10628
10629 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10630 return fold_builtin_signbit (loc, arg0, type);
10631
10632 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10633 return fold_builtin_significand (loc, arg0, type);
10634
10635 CASE_FLT_FN (BUILT_IN_ILOGB):
10636 CASE_FLT_FN (BUILT_IN_LOGB):
10637 return fold_builtin_logb (loc, arg0, type);
10638
10639 case BUILT_IN_ISASCII:
10640 return fold_builtin_isascii (loc, arg0);
10641
10642 case BUILT_IN_TOASCII:
10643 return fold_builtin_toascii (loc, arg0);
10644
10645 case BUILT_IN_ISDIGIT:
10646 return fold_builtin_isdigit (loc, arg0);
10647
10648 CASE_FLT_FN (BUILT_IN_FINITE):
10649 case BUILT_IN_FINITED32:
10650 case BUILT_IN_FINITED64:
10651 case BUILT_IN_FINITED128:
10652 case BUILT_IN_ISFINITE:
10653 {
10654 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10655 if (ret)
10656 return ret;
10657 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10658 }
10659
10660 CASE_FLT_FN (BUILT_IN_ISINF):
10661 case BUILT_IN_ISINFD32:
10662 case BUILT_IN_ISINFD64:
10663 case BUILT_IN_ISINFD128:
10664 {
10665 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10666 if (ret)
10667 return ret;
10668 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10669 }
10670
10671 case BUILT_IN_ISNORMAL:
10672 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10673
10674 case BUILT_IN_ISINF_SIGN:
10675 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10676
10677 CASE_FLT_FN (BUILT_IN_ISNAN):
10678 case BUILT_IN_ISNAND32:
10679 case BUILT_IN_ISNAND64:
10680 case BUILT_IN_ISNAND128:
10681 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10682
10683 case BUILT_IN_PRINTF:
10684 case BUILT_IN_PRINTF_UNLOCKED:
10685 case BUILT_IN_VPRINTF:
10686 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
10687
10688 case BUILT_IN_FREE:
10689 if (integer_zerop (arg0))
10690 return build_empty_stmt (loc);
10691 break;
10692
10693 default:
10694 break;
10695 }
10696
10697 return NULL_TREE;
10698
10699 }
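/* Illustrative one-argument folds handled above (sketches, not an
   exhaustive list): with a constant argument many cases fold at
   compile time, e.g.

     fabs (-3.0) -> 3.0        (fold_builtin_fabs)
     ffs (0x10)  -> 5          (fold_builtin_bitop)

   while transcendental cases such as sin or csin of a constant are
   evaluated with MPFR/MPC to the target precision.  NULL_TREE is
   returned whenever no simplification applies.  */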
10700
10701 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10702 IGNORE is true if the result of the function call is ignored. This
10703 function returns NULL_TREE if no simplification was possible. */
10704
10705 static tree
10706 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
10707 {
10708 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10709 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10710
10711 switch (fcode)
10712 {
10713 CASE_FLT_FN (BUILT_IN_JN):
10714 if (validate_arg (arg0, INTEGER_TYPE)
10715 && validate_arg (arg1, REAL_TYPE))
10716 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10717 break;
10718
10719 CASE_FLT_FN (BUILT_IN_YN):
10720 if (validate_arg (arg0, INTEGER_TYPE)
10721 && validate_arg (arg1, REAL_TYPE))
10722 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10723 &dconst0, false);
10724 break;
10725
10726 CASE_FLT_FN (BUILT_IN_DREM):
10727 CASE_FLT_FN (BUILT_IN_REMAINDER):
10728 if (validate_arg (arg0, REAL_TYPE)
10729 && validate_arg (arg1, REAL_TYPE))
10730 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10731 break;
10732
10733 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10734 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10735 if (validate_arg (arg0, REAL_TYPE)
10736 && validate_arg (arg1, POINTER_TYPE))
10737 return do_mpfr_lgamma_r (arg0, arg1, type);
10738 break;
10739
10740 CASE_FLT_FN (BUILT_IN_ATAN2):
10741 if (validate_arg (arg0, REAL_TYPE)
10742 && validate_arg (arg1, REAL_TYPE))
10743 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10744 break;
10745
10746 CASE_FLT_FN (BUILT_IN_FDIM):
10747 if (validate_arg (arg0, REAL_TYPE)
10748 && validate_arg (arg1, REAL_TYPE))
10749 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10750 break;
10751
10752 CASE_FLT_FN (BUILT_IN_HYPOT):
10753 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10754
10755 CASE_FLT_FN (BUILT_IN_CPOW):
10756 if (validate_arg (arg0, COMPLEX_TYPE)
10757 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10758 && validate_arg (arg1, COMPLEX_TYPE)
10759 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10760 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10761 break;
10762
10763 CASE_FLT_FN (BUILT_IN_LDEXP):
10764 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10765 CASE_FLT_FN (BUILT_IN_SCALBN):
10766 CASE_FLT_FN (BUILT_IN_SCALBLN):
10767 return fold_builtin_load_exponent (loc, arg0, arg1,
10768 type, /*ldexp=*/false);
10769
10770 CASE_FLT_FN (BUILT_IN_FREXP):
10771 return fold_builtin_frexp (loc, arg0, arg1, type);
10772
10773 CASE_FLT_FN (BUILT_IN_MODF):
10774 return fold_builtin_modf (loc, arg0, arg1, type);
10775
10776 case BUILT_IN_BZERO:
10777 return fold_builtin_bzero (loc, arg0, arg1, ignore);
10778
10779 case BUILT_IN_FPUTS:
10780 return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);
10781
10782 case BUILT_IN_FPUTS_UNLOCKED:
10783 return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);
10784
10785 case BUILT_IN_STRSTR:
10786 return fold_builtin_strstr (loc, arg0, arg1, type);
10787
10788 case BUILT_IN_STRCAT:
10789 return fold_builtin_strcat (loc, arg0, arg1);
10790
10791 case BUILT_IN_STRSPN:
10792 return fold_builtin_strspn (loc, arg0, arg1);
10793
10794 case BUILT_IN_STRCSPN:
10795 return fold_builtin_strcspn (loc, arg0, arg1);
10796
10797 case BUILT_IN_STRCHR:
10798 case BUILT_IN_INDEX:
10799 return fold_builtin_strchr (loc, arg0, arg1, type);
10800
10801 case BUILT_IN_STRRCHR:
10802 case BUILT_IN_RINDEX:
10803 return fold_builtin_strrchr (loc, arg0, arg1, type);
10804
10805 case BUILT_IN_STRCPY:
10806 return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);
10807
10808 case BUILT_IN_STPCPY:
10809 if (ignore)
10810 {
10811 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
10812 if (!fn)
10813 break;
10814
10815 return build_call_expr_loc (loc, fn, 2, arg0, arg1);
10816 }
10817 else
10818 return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
10819 break;
10820
10821 case BUILT_IN_STRCMP:
10822 return fold_builtin_strcmp (loc, arg0, arg1);
10823
10824 case BUILT_IN_STRPBRK:
10825 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10826
10827 case BUILT_IN_EXPECT:
10828 return fold_builtin_expect (loc, arg0, arg1);
10829
10830 CASE_FLT_FN (BUILT_IN_POW):
10831 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10832
10833 CASE_FLT_FN (BUILT_IN_POWI):
10834 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10835
10836 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10837 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10838
10839 CASE_FLT_FN (BUILT_IN_FMIN):
10840 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10841
10842 CASE_FLT_FN (BUILT_IN_FMAX):
10843 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10844
10845 case BUILT_IN_ISGREATER:
10846 return fold_builtin_unordered_cmp (loc, fndecl,
10847 arg0, arg1, UNLE_EXPR, LE_EXPR);
10848 case BUILT_IN_ISGREATEREQUAL:
10849 return fold_builtin_unordered_cmp (loc, fndecl,
10850 arg0, arg1, UNLT_EXPR, LT_EXPR);
10851 case BUILT_IN_ISLESS:
10852 return fold_builtin_unordered_cmp (loc, fndecl,
10853 arg0, arg1, UNGE_EXPR, GE_EXPR);
10854 case BUILT_IN_ISLESSEQUAL:
10855 return fold_builtin_unordered_cmp (loc, fndecl,
10856 arg0, arg1, UNGT_EXPR, GT_EXPR);
10857 case BUILT_IN_ISLESSGREATER:
10858 return fold_builtin_unordered_cmp (loc, fndecl,
10859 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10860 case BUILT_IN_ISUNORDERED:
10861 return fold_builtin_unordered_cmp (loc, fndecl,
10862 arg0, arg1, UNORDERED_EXPR,
10863 NOP_EXPR);
10864
10865 /* We do the folding for va_start in the expander. */
10866 case BUILT_IN_VA_START:
10867 break;
10868
10869 case BUILT_IN_SPRINTF:
10870 return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);
10871
10872 case BUILT_IN_OBJECT_SIZE:
10873 return fold_builtin_object_size (arg0, arg1);
10874
10875 case BUILT_IN_PRINTF:
10876 case BUILT_IN_PRINTF_UNLOCKED:
10877 case BUILT_IN_VPRINTF:
10878 return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);
10879
10880 case BUILT_IN_PRINTF_CHK:
10881 case BUILT_IN_VPRINTF_CHK:
10882 if (!validate_arg (arg0, INTEGER_TYPE)
10883 || TREE_SIDE_EFFECTS (arg0))
10884 return NULL_TREE;
10885 else
10886 return fold_builtin_printf (loc, fndecl,
10887 arg1, NULL_TREE, ignore, fcode);
10888 break;
10889
10890 case BUILT_IN_FPRINTF:
10891 case BUILT_IN_FPRINTF_UNLOCKED:
10892 case BUILT_IN_VFPRINTF:
10893 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
10894 ignore, fcode);
10895
10896 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10897 return fold_builtin_atomic_always_lock_free (arg0, arg1);
10898
10899 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10900 return fold_builtin_atomic_is_lock_free (arg0, arg1);
10901
10902 default:
10903 break;
10904 }
10905 return NULL_TREE;
10906 }
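/* A few illustrative two-argument folds (sketches): with both
   arguments constant, atan2, fdim and remainder are evaluated with
   MPFR by the cases above, and e.g.

     strstr (s, "c") -> strchr (s, 'c')
     stpcpy (d, s)   -> strcpy (d, s)   when the result is ignored

   as handled by the corresponding cases.  */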
10907
10908 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10909 and ARG2. IGNORE is true if the result of the function call is ignored.
10910 This function returns NULL_TREE if no simplification was possible. */
10911
10912 static tree
10913 fold_builtin_3 (location_t loc, tree fndecl,
10914 tree arg0, tree arg1, tree arg2, bool ignore)
10915 {
10916 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10917 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10918 switch (fcode)
10919 {
10920
10921 CASE_FLT_FN (BUILT_IN_SINCOS):
10922 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10923
10924 CASE_FLT_FN (BUILT_IN_FMA):
10925 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
10926 break;
10927
10928 CASE_FLT_FN (BUILT_IN_REMQUO):
10929 if (validate_arg (arg0, REAL_TYPE)
10930 && validate_arg (arg1, REAL_TYPE)
10931 && validate_arg (arg2, POINTER_TYPE))
10932 return do_mpfr_remquo (arg0, arg1, arg2);
10933 break;
10934
10935 case BUILT_IN_MEMSET:
10936 return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);
10937
10938 case BUILT_IN_BCOPY:
10939 return fold_builtin_memory_op (loc, arg1, arg0, arg2,
10940 void_type_node, true, /*endp=*/3);
10941
10942 case BUILT_IN_MEMCPY:
10943 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10944 type, ignore, /*endp=*/0);
10945
10946 case BUILT_IN_MEMPCPY:
10947 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10948 type, ignore, /*endp=*/1);
10949
10950 case BUILT_IN_MEMMOVE:
10951 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10952 type, ignore, /*endp=*/3);
10953
10954 case BUILT_IN_STRNCAT:
10955 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10956
10957 case BUILT_IN_STRNCPY:
10958 return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);
10959
10960 case BUILT_IN_STRNCMP:
10961 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10962
10963 case BUILT_IN_MEMCHR:
10964 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10965
10966 case BUILT_IN_BCMP:
10967 case BUILT_IN_MEMCMP:
10968 	return fold_builtin_memcmp (loc, arg0, arg1, arg2);
10969
10970 case BUILT_IN_SPRINTF:
10971 return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);
10972
10973 case BUILT_IN_SNPRINTF:
10974 return fold_builtin_snprintf (loc, arg0, arg1, arg2, NULL_TREE, ignore);
10975
10976 case BUILT_IN_STRCPY_CHK:
10977 case BUILT_IN_STPCPY_CHK:
10978 return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
10979 ignore, fcode);
10980
10981 case BUILT_IN_STRCAT_CHK:
10982 return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
10983
10984 case BUILT_IN_PRINTF_CHK:
10985 case BUILT_IN_VPRINTF_CHK:
10986 if (!validate_arg (arg0, INTEGER_TYPE)
10987 || TREE_SIDE_EFFECTS (arg0))
10988 return NULL_TREE;
10989 else
10990 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
10991 break;
10992
10993 case BUILT_IN_FPRINTF:
10994 case BUILT_IN_FPRINTF_UNLOCKED:
10995 case BUILT_IN_VFPRINTF:
10996 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
10997 ignore, fcode);
10998
10999 case BUILT_IN_FPRINTF_CHK:
11000 case BUILT_IN_VFPRINTF_CHK:
11001 if (!validate_arg (arg1, INTEGER_TYPE)
11002 || TREE_SIDE_EFFECTS (arg1))
11003 return NULL_TREE;
11004 else
11005 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
11006 ignore, fcode);
11007
11008 default:
11009 break;
11010 }
11011 return NULL_TREE;
11012 }
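/* For example, bcopy (src, dst, n) is handled above as a memmove-style
   copy with the first two arguments swapped, and a memcpy with a small
   constant length may be folded by fold_builtin_memory_op into a
   direct assignment.  */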
11013
11014 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
11015 ARG2, and ARG3. IGNORE is true if the result of the function call is
11016 ignored. This function returns NULL_TREE if no simplification was
11017 possible. */
11018
11019 static tree
11020 fold_builtin_4 (location_t loc, tree fndecl,
11021 tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
11022 {
11023 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11024
11025 switch (fcode)
11026 {
11027 case BUILT_IN_MEMCPY_CHK:
11028 case BUILT_IN_MEMPCPY_CHK:
11029 case BUILT_IN_MEMMOVE_CHK:
11030 case BUILT_IN_MEMSET_CHK:
11031 return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
11032 NULL_TREE, ignore,
11033 DECL_FUNCTION_CODE (fndecl));
11034
11035 case BUILT_IN_STRNCPY_CHK:
11036 case BUILT_IN_STPNCPY_CHK:
11037 return fold_builtin_stxncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE,
11038 ignore, fcode);
11039
11040 case BUILT_IN_STRNCAT_CHK:
11041 return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);
11042
11043 case BUILT_IN_SNPRINTF:
11044 return fold_builtin_snprintf (loc, arg0, arg1, arg2, arg3, ignore);
11045
11046 case BUILT_IN_FPRINTF_CHK:
11047 case BUILT_IN_VFPRINTF_CHK:
11048 if (!validate_arg (arg1, INTEGER_TYPE)
11049 || TREE_SIDE_EFFECTS (arg1))
11050 return NULL_TREE;
11051 else
11052 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
11053 ignore, fcode);
11054 break;
11055
11056 default:
11057 break;
11058 }
11059 return NULL_TREE;
11060 }
11061
11062 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
11063 arguments, where NARGS <= 4. IGNORE is true if the result of the
11064 function call is ignored. This function returns NULL_TREE if no
11065 simplification was possible. Note that this only folds builtins with
11066 fixed argument patterns. Foldings that do varargs-to-varargs
11067 transformations, or that match calls with more than 4 arguments,
11068 need to be handled with fold_builtin_varargs instead. */
11069
11070 #define MAX_ARGS_TO_FOLD_BUILTIN 4
11071
11072 static tree
11073 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
11074 {
11075 tree ret = NULL_TREE;
11076
11077 switch (nargs)
11078 {
11079 case 0:
11080 ret = fold_builtin_0 (loc, fndecl, ignore);
11081 break;
11082 case 1:
11083 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
11084 break;
11085 case 2:
11086 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
11087 break;
11088 case 3:
11089 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
11090 break;
11091 case 4:
11092 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
11093 ignore);
11094 break;
11095 default:
11096 break;
11097 }
11098 if (ret)
11099 {
11100 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11101 SET_EXPR_LOCATION (ret, loc);
11102 TREE_NO_WARNING (ret) = 1;
11103 return ret;
11104 }
11105 return NULL_TREE;
11106 }
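/* Note: the NOP_EXPR wrapper above keeps a successful fold such as
   strlen ("abc") -> 3 from triggering a "statement without effect"
   warning when the call's value is unused; TREE_NO_WARNING is set on
   the wrapper for the same reason.  */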
11107
11108 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
11109 list ARGS along with N new arguments in NEWARGS. SKIP is the number
11110 of arguments in ARGS to be omitted. OLDNARGS is the number of
11111 elements in ARGS. */
11112
11113 static tree
11114 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
11115 int skip, tree fndecl, int n, va_list newargs)
11116 {
11117 int nargs = oldnargs - skip + n;
11118 tree *buffer;
11119
11120 if (n > 0)
11121 {
11122 int i, j;
11123
11124 buffer = XALLOCAVEC (tree, nargs);
11125 for (i = 0; i < n; i++)
11126 buffer[i] = va_arg (newargs, tree);
11127 for (j = skip; j < oldnargs; j++, i++)
11128 buffer[i] = args[j];
11129 }
11130 else
11131 buffer = args + skip;
11132
11133 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
11134 }
11135
11136 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
11137 list ARGS along with N new arguments specified as the "..."
11138 parameters. SKIP is the number of arguments in ARGS to be omitted.
11139 OLDNARGS is the number of elements in ARGS. */
11140
11141 static tree
11142 rewrite_call_expr_array (location_t loc, int oldnargs, tree *args,
11143 int skip, tree fndecl, int n, ...)
11144 {
11145 va_list ap;
11146 tree t;
11147
11148 va_start (ap, n);
11149 t = rewrite_call_expr_valist (loc, oldnargs, args, skip, fndecl, n, ap);
11150 va_end (ap);
11151
11152 return t;
11153 }
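/* For instance, with OLDNARGS == 4, SKIP == 2 and two new arguments A
   and B, the rewritten call is FNDECL (A, B, ARGS[2], ARGS[3]).  */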
11154
11155 /* Return true if FNDECL shouldn't be folded right now.
11156 If a built-in function has an inline attribute always_inline
11157 wrapper, defer folding it after always_inline functions have
11158 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
11159 might not be performed. */
11160
11161 bool
11162 avoid_folding_inline_builtin (tree fndecl)
11163 {
11164 return (DECL_DECLARED_INLINE_P (fndecl)
11165 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
11166 && cfun
11167 && !cfun->always_inline_functions_inlined
11168 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
11169 }
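/* E.g. a fortify-style always_inline wrapper (a sketch of the usual
   glibc pattern, not code from this tree):

     extern __inline __attribute__ ((__always_inline__, __gnu_inline__))
     char *strcpy (char *__dest, const char *__src)
     {
       return __builtin___strcpy_chk (__dest, __src,
				      __builtin_object_size (__dest, 1));
     }

   Folding the outer strcpy call before this wrapper is inlined would
   bypass the -D_FORTIFY_SOURCE object-size check.  */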
11170
11171 /* A wrapper function for builtin folding that prevents warnings for
11172 "statement without effect" and the like, caused by removing the
11173 call node earlier than the warning is generated. */
11174
11175 tree
11176 fold_call_expr (location_t loc, tree exp, bool ignore)
11177 {
11178 tree ret = NULL_TREE;
11179 tree fndecl = get_callee_fndecl (exp);
11180 if (fndecl
11181 && TREE_CODE (fndecl) == FUNCTION_DECL
11182 && DECL_BUILT_IN (fndecl)
11183 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
11184 yet. Defer folding until we see all the arguments
11185 (after inlining). */
11186 && !CALL_EXPR_VA_ARG_PACK (exp))
11187 {
11188 int nargs = call_expr_nargs (exp);
11189
11190 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
11191 instead last argument is __builtin_va_arg_pack (). Defer folding
11192 even in that case, until arguments are finalized. */
11193 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
11194 {
11195 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
11196 if (fndecl2
11197 && TREE_CODE (fndecl2) == FUNCTION_DECL
11198 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
11199 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
11200 return NULL_TREE;
11201 }
11202
11203 if (avoid_folding_inline_builtin (fndecl))
11204 return NULL_TREE;
11205
11206 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11207 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
11208 CALL_EXPR_ARGP (exp), ignore);
11209 else
11210 {
11211 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
11212 {
11213 tree *args = CALL_EXPR_ARGP (exp);
11214 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
11215 }
11216 if (!ret)
11217 ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
11218 if (ret)
11219 return ret;
11220 }
11221 }
11222 return NULL_TREE;
11223 }
11224
11225 /* Conveniently construct a function call expression. FNDECL names the
11226 function to be called and N arguments are passed in the array
11227 ARGARRAY. */
11228
11229 tree
11230 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
11231 {
11232 tree fntype = TREE_TYPE (fndecl);
11233 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11234
11235 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
11236 }
11237
11238 /* Conveniently construct a function call expression. FNDECL names the
11239 function to be called and the arguments are passed in the vector
11240 VEC. */
11241
11242 tree
11243 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
11244 {
11245 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
11246 vec_safe_address (vec));
11247 }
11248
11249
11250 /* Conveniently construct a function call expression. FNDECL names the
11251 function to be called, N is the number of arguments, and the "..."
11252 parameters are the argument expressions. */
11253
11254 tree
11255 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
11256 {
11257 va_list ap;
11258 tree *argarray = XALLOCAVEC (tree, n);
11259 int i;
11260
11261 va_start (ap, n);
11262 for (i = 0; i < n; i++)
11263 argarray[i] = va_arg (ap, tree);
11264 va_end (ap);
11265 return build_call_expr_loc_array (loc, fndecl, n, argarray);
11266 }
11267
11268 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
11269 varargs macros aren't supported by all bootstrap compilers. */
11270
11271 tree
11272 build_call_expr (tree fndecl, int n, ...)
11273 {
11274 va_list ap;
11275 tree *argarray = XALLOCAVEC (tree, n);
11276 int i;
11277
11278 va_start (ap, n);
11279 for (i = 0; i < n; i++)
11280 argarray[i] = va_arg (ap, tree);
11281 va_end (ap);
11282 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
11283 }
11284
11285 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
11286 N arguments are passed in the array ARGARRAY. */
11287
11288 tree
11289 fold_builtin_call_array (location_t loc, tree type,
11290 tree fn,
11291 int n,
11292 tree *argarray)
11293 {
11294 tree ret = NULL_TREE;
11295 tree exp;
11296
11297 if (TREE_CODE (fn) == ADDR_EXPR)
11298 {
11299 tree fndecl = TREE_OPERAND (fn, 0);
11300 if (TREE_CODE (fndecl) == FUNCTION_DECL
11301 && DECL_BUILT_IN (fndecl))
11302 {
11303 /* If last argument is __builtin_va_arg_pack (), arguments to this
11304 function are not finalized yet. Defer folding until they are. */
11305 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
11306 {
11307 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
11308 if (fndecl2
11309 && TREE_CODE (fndecl2) == FUNCTION_DECL
11310 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
11311 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
11312 return build_call_array_loc (loc, type, fn, n, argarray);
11313 }
11314 if (avoid_folding_inline_builtin (fndecl))
11315 return build_call_array_loc (loc, type, fn, n, argarray);
11316 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11317 {
11318 ret = targetm.fold_builtin (fndecl, n, argarray, false);
11319 if (ret)
11320 return ret;
11321
11322 return build_call_array_loc (loc, type, fn, n, argarray);
11323 }
11324 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
11325 {
11326 /* First try the transformations that don't require consing up
11327 an exp. */
11328 ret = fold_builtin_n (loc, fndecl, argarray, n, false);
11329 if (ret)
11330 return ret;
11331 }
11332
11333 /* If we got this far, we need to build an exp. */
11334 exp = build_call_array_loc (loc, type, fn, n, argarray);
11335 ret = fold_builtin_varargs (loc, fndecl, exp, false);
11336 return ret ? ret : exp;
11337 }
11338 }
11339
11340 return build_call_array_loc (loc, type, fn, n, argarray);
11341 }
11342
11343 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
11344 along with N new arguments specified as the "..." parameters. SKIP
11345 is the number of arguments in EXP to be omitted. This function is used
11346 to do varargs-to-varargs transformations. */
11347
11348 static tree
11349 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
11350 {
11351 va_list ap;
11352 tree t;
11353
11354 va_start (ap, n);
11355 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
11356 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
11357 va_end (ap);
11358
11359 return t;
11360 }
11361
11362 /* Validate a single argument ARG against a tree code CODE representing
11363 a type. */
11364
11365 static bool
11366 validate_arg (const_tree arg, enum tree_code code)
11367 {
11368 if (!arg)
11369 return false;
11370 else if (code == POINTER_TYPE)
11371 return POINTER_TYPE_P (TREE_TYPE (arg));
11372 else if (code == INTEGER_TYPE)
11373 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
11374 return code == TREE_CODE (TREE_TYPE (arg));
11375 }
11376
11377 /* This function validates the types of a function call argument list
11378 against a specified list of tree_codes. If the last specifier is a 0,
11379    that represents an ellipsis, otherwise the last specifier must be a
11380 VOID_TYPE.
11381
11382 This is the GIMPLE version of validate_arglist. Eventually we want to
11383 completely convert builtins.c to work from GIMPLEs and the tree based
11384 validate_arglist will then be removed. */
11385
11386 bool
11387 validate_gimple_arglist (const_gimple call, ...)
11388 {
11389 enum tree_code code;
11390   bool res = false;
11391 va_list ap;
11392 const_tree arg;
11393 size_t i;
11394
11395 va_start (ap, call);
11396 i = 0;
11397
11398 do
11399 {
11400 code = (enum tree_code) va_arg (ap, int);
11401 switch (code)
11402 {
11403 case 0:
11404 	  /* This signifies an ellipsis; any further arguments are all ok.  */
11405 res = true;
11406 goto end;
11407 case VOID_TYPE:
11408 /* This signifies an endlink, if no arguments remain, return
11409 true, otherwise return false. */
11410 res = (i == gimple_call_num_args (call));
11411 goto end;
11412 default:
11413 /* If no parameters remain or the parameter's code does not
11414 match the specified code, return false. Otherwise continue
11415 checking any remaining arguments. */
11416 arg = gimple_call_arg (call, i++);
11417 if (!validate_arg (arg, code))
11418 goto end;
11419 break;
11420 }
11421 }
11422 while (1);
11423
11424 /* We need gotos here since we can only have one VA_CLOSE in a
11425 function. */
11426 end: ;
11427 va_end (ap);
11428
11429 return res;
11430 }
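/* Typical use (a sketch): a two-pointer builtin such as strstr would
   be checked with

     validate_gimple_arglist (call, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)

   where the trailing VOID_TYPE marks the end of the expected argument
   list; a trailing 0 would instead accept any further arguments.  */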
11431
11432 /* Default target-specific builtin expander that does nothing. */
11433
11434 rtx
11435 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
11436 rtx target ATTRIBUTE_UNUSED,
11437 rtx subtarget ATTRIBUTE_UNUSED,
11438 enum machine_mode mode ATTRIBUTE_UNUSED,
11439 int ignore ATTRIBUTE_UNUSED)
11440 {
11441 return NULL_RTX;
11442 }
11443
11444 /* Returns true if EXP represents data that would potentially reside
11445 in a readonly section. */
11446
11447 static bool
11448 readonly_data_expr (tree exp)
11449 {
11450 STRIP_NOPS (exp);
11451
11452 if (TREE_CODE (exp) != ADDR_EXPR)
11453 return false;
11454
11455 exp = get_base_address (TREE_OPERAND (exp, 0));
11456 if (!exp)
11457 return false;
11458
11459 /* Make sure we call decl_readonly_section only for trees it
11460 can handle (since it returns true for everything it doesn't
11461 understand). */
11462 if (TREE_CODE (exp) == STRING_CST
11463 || TREE_CODE (exp) == CONSTRUCTOR
11464 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
11465 return decl_readonly_section (exp, 0);
11466 else
11467 return false;
11468 }
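/* E.g. the address of a string literal, of a CONSTRUCTOR, or of a
   static variable placed in a read-only section qualifies; anything
   else is conservatively treated as writable.  */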
11469
11470 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
11471 to the call, and TYPE is its return type.
11472
11473 Return NULL_TREE if no simplification was possible, otherwise return the
11474 simplified form of the call as a tree.
11475
11476 The simplified form may be a constant or other expression which
11477 computes the same value, but in a more efficient manner (including
11478 calls to other builtin functions).
11479
11480 The call may contain arguments which need to be evaluated, but
11481 which are not useful to determine the result of the call. In
11482 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11483 COMPOUND_EXPR will be an argument which must be evaluated.
11484 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11485 COMPOUND_EXPR in the chain will contain the tree for the simplified
11486 form of the builtin function call. */
11487
11488 static tree
11489 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
11490 {
11491 if (!validate_arg (s1, POINTER_TYPE)
11492 || !validate_arg (s2, POINTER_TYPE))
11493 return NULL_TREE;
11494 else
11495 {
11496 tree fn;
11497 const char *p1, *p2;
11498
11499 p2 = c_getstr (s2);
11500 if (p2 == NULL)
11501 return NULL_TREE;
11502
11503 p1 = c_getstr (s1);
11504 if (p1 != NULL)
11505 {
11506 const char *r = strstr (p1, p2);
11507 tree tem;
11508
11509 if (r == NULL)
11510 return build_int_cst (TREE_TYPE (s1), 0);
11511
11512 /* Return an offset into the constant string argument. */
11513 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11514 return fold_convert_loc (loc, type, tem);
11515 }
11516
11517 /* The argument is const char *, and the result is char *, so we need
11518 a type conversion here to avoid a warning. */
11519 if (p2[0] == '\0')
11520 return fold_convert_loc (loc, type, s1);
11521
11522 if (p2[1] != '\0')
11523 return NULL_TREE;
11524
11525 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11526 if (!fn)
11527 return NULL_TREE;
11528
11529 /* New argument list transforming strstr(s1, s2) to
11530 strchr(s1, s2[0]). */
11531 return build_call_expr_loc (loc, fn, 2, s1,
11532 build_int_cst (integer_type_node, p2[0]));
11533 }
11534 }
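/* Examples of the strstr folds above (with S2 a string literal):

     strstr (s1, "")        -> (char *) s1
     strstr (s1, "c")       -> strchr (s1, 'c')
     strstr ("abcde", "cd") -> "abcde" + 2

   the last one only when S1 is itself a constant string.  */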
11535
11536 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11537 the call, and TYPE is its return type.
11538
11539 Return NULL_TREE if no simplification was possible, otherwise return the
11540 simplified form of the call as a tree.
11541
11542 The simplified form may be a constant or other expression which
11543 computes the same value, but in a more efficient manner (including
11544 calls to other builtin functions).
11545
11546 The call may contain arguments which need to be evaluated, but
11547 which are not useful to determine the result of the call. In
11548 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11549 COMPOUND_EXPR will be an argument which must be evaluated.
11550 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11551 COMPOUND_EXPR in the chain will contain the tree for the simplified
11552 form of the builtin function call. */
11553
11554 static tree
11555 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
11556 {
11557 if (!validate_arg (s1, POINTER_TYPE)
11558 || !validate_arg (s2, INTEGER_TYPE))
11559 return NULL_TREE;
11560 else
11561 {
11562 const char *p1;
11563
11564 if (TREE_CODE (s2) != INTEGER_CST)
11565 return NULL_TREE;
11566
11567 p1 = c_getstr (s1);
11568 if (p1 != NULL)
11569 {
11570 char c;
11571 const char *r;
11572 tree tem;
11573
11574 if (target_char_cast (s2, &c))
11575 return NULL_TREE;
11576
11577 r = strchr (p1, c);
11578
11579 if (r == NULL)
11580 return build_int_cst (TREE_TYPE (s1), 0);
11581
11582 /* Return an offset into the constant string argument. */
11583 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11584 return fold_convert_loc (loc, type, tem);
11585 }
11586 return NULL_TREE;
11587 }
11588 }
11589
11590 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11591 the call, and TYPE is its return type.
11592
11593 Return NULL_TREE if no simplification was possible, otherwise return the
11594 simplified form of the call as a tree.
11595
11596 The simplified form may be a constant or other expression which
11597 computes the same value, but in a more efficient manner (including
11598 calls to other builtin functions).
11599
11600 The call may contain arguments which need to be evaluated, but
11601 which are not useful to determine the result of the call. In
11602 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11603 COMPOUND_EXPR will be an argument which must be evaluated.
11604 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11605 COMPOUND_EXPR in the chain will contain the tree for the simplified
11606 form of the builtin function call. */
11607
11608 static tree
11609 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
11610 {
11611 if (!validate_arg (s1, POINTER_TYPE)
11612 || !validate_arg (s2, INTEGER_TYPE))
11613 return NULL_TREE;
11614 else
11615 {
11616 tree fn;
11617 const char *p1;
11618
11619 if (TREE_CODE (s2) != INTEGER_CST)
11620 return NULL_TREE;
11621
11622 p1 = c_getstr (s1);
11623 if (p1 != NULL)
11624 {
11625 char c;
11626 const char *r;
11627 tree tem;
11628
11629 if (target_char_cast (s2, &c))
11630 return NULL_TREE;
11631
11632 r = strrchr (p1, c);
11633
11634 if (r == NULL)
11635 return build_int_cst (TREE_TYPE (s1), 0);
11636
11637 /* Return an offset into the constant string argument. */
11638 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11639 return fold_convert_loc (loc, type, tem);
11640 }
11641
11642 if (! integer_zerop (s2))
11643 return NULL_TREE;
11644
11645 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11646 if (!fn)
11647 return NULL_TREE;
11648
11649 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11650 return build_call_expr_loc (loc, fn, 2, s1, s2);
11651 }
11652 }
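/* Examples of the strrchr folds above:

     strrchr ("abcabc", 'b') -> "abcabc" + 4
     strrchr (s1, '\0')      -> strchr (s1, '\0')

   since a search for the terminating NUL finds the same character from
   either end.  */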
11653
11654 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11655 to the call, and TYPE is its return type.
11656
11657 Return NULL_TREE if no simplification was possible, otherwise return the
11658 simplified form of the call as a tree.
11659
11660 The simplified form may be a constant or other expression which
11661 computes the same value, but in a more efficient manner (including
11662 calls to other builtin functions).
11663
11664 The call may contain arguments which need to be evaluated, but
11665 which are not useful to determine the result of the call. In
11666 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11667 COMPOUND_EXPR will be an argument which must be evaluated.
11668 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11669 COMPOUND_EXPR in the chain will contain the tree for the simplified
11670 form of the builtin function call. */
11671
11672 static tree
11673 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
11674 {
11675 if (!validate_arg (s1, POINTER_TYPE)
11676 || !validate_arg (s2, POINTER_TYPE))
11677 return NULL_TREE;
11678 else
11679 {
11680 tree fn;
11681 const char *p1, *p2;
11682
11683 p2 = c_getstr (s2);
11684 if (p2 == NULL)
11685 return NULL_TREE;
11686
11687 p1 = c_getstr (s1);
11688 if (p1 != NULL)
11689 {
11690 const char *r = strpbrk (p1, p2);
11691 tree tem;
11692
11693 if (r == NULL)
11694 return build_int_cst (TREE_TYPE (s1), 0);
11695
11696 /* Return an offset into the constant string argument. */
11697 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11698 return fold_convert_loc (loc, type, tem);
11699 }
11700
11701 if (p2[0] == '\0')
11702 /* strpbrk(x, "") == NULL.
11703 Evaluate and ignore s1 in case it had side-effects. */
11704 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11705
11706 if (p2[1] != '\0')
11707 return NULL_TREE; /* Really call strpbrk. */
11708
11709 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11710 if (!fn)
11711 return NULL_TREE;
11712
11713 /* New argument list transforming strpbrk(s1, s2) to
11714 strchr(s1, s2[0]). */
11715 return build_call_expr_loc (loc, fn, 2, s1,
11716 build_int_cst (integer_type_node, p2[0]));
11717 }
11718 }
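/* Examples of the strpbrk folds above:

     strpbrk (s1, "")  -> (char *) 0, still evaluating S1
     strpbrk (s1, "x") -> strchr (s1, 'x')  */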
11719
11720 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11721 to the call.
11722
11723 Return NULL_TREE if no simplification was possible, otherwise return the
11724 simplified form of the call as a tree.
11725
11726 The simplified form may be a constant or other expression which
11727 computes the same value, but in a more efficient manner (including
11728 calls to other builtin functions).
11729
11730 The call may contain arguments which need to be evaluated, but
11731 which are not useful to determine the result of the call. In
11732 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11733 COMPOUND_EXPR will be an argument which must be evaluated.
11734 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11735 COMPOUND_EXPR in the chain will contain the tree for the simplified
11736 form of the builtin function call. */
11737
11738 static tree
11739 fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src)
11740 {
11741 if (!validate_arg (dst, POINTER_TYPE)
11742 || !validate_arg (src, POINTER_TYPE))
11743 return NULL_TREE;
11744 else
11745 {
11746 const char *p = c_getstr (src);
11747
11748 /* If the string length is zero, return the dst parameter. */
11749 if (p && *p == '\0')
11750 return dst;
11751
11752 if (optimize_insn_for_speed_p ())
11753 {
11754 /* See if we can store by pieces into (dst + strlen(dst)). */
11755 tree newdst, call;
11756 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11757 tree strcpy_fn = builtin_decl_implicit (BUILT_IN_STRCPY);
11758
11759 if (!strlen_fn || !strcpy_fn)
11760 return NULL_TREE;
11761
11762 	  /* If we don't have a movstr pattern we don't want to emit a strcpy
11763 	     call unless the length of the source string is computable (in
11764 	     that case the copy can probably be done with memcpy, later
11765 	     expanding to a sequence of mov instructions).  If we have
11766 	     movstr instructions we can emit strcpy calls regardless.  */
11767 if (!HAVE_movstr)
11768 {
11769 tree len = c_strlen (src, 1);
11770 if (! len || TREE_SIDE_EFFECTS (len))
11771 return NULL_TREE;
11772 }
11773
11774 /* Stabilize the argument list. */
11775 dst = builtin_save_expr (dst);
11776
11777 /* Create strlen (dst). */
11778 newdst = build_call_expr_loc (loc, strlen_fn, 1, dst);
11779 /* Create (dst p+ strlen (dst)). */
11780
11781 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
11782 newdst = builtin_save_expr (newdst);
11783
11784 call = build_call_expr_loc (loc, strcpy_fn, 2, newdst, src);
11785 return build2 (COMPOUND_EXPR, TREE_TYPE (dst), call, dst);
11786 }
11787 return NULL_TREE;
11788 }
11789 }
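/* The store-by-pieces path above rewrites, roughly,

     strcat (dst, src)

   into the equivalent of

     (strcpy (dst + strlen (dst), src), dst)

   so the copy has an explicit destination that later passes can expand
   inline.  */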
11790
11791 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11792 arguments to the call.
11793
11794 Return NULL_TREE if no simplification was possible, otherwise return the
11795 simplified form of the call as a tree.
11796
11797 The simplified form may be a constant or other expression which
11798 computes the same value, but in a more efficient manner (including
11799 calls to other builtin functions).
11800
11801 The call may contain arguments which need to be evaluated, but
11802 which are not useful to determine the result of the call. In
11803 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11804 COMPOUND_EXPR will be an argument which must be evaluated.
11805 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11806 COMPOUND_EXPR in the chain will contain the tree for the simplified
11807 form of the builtin function call. */
11808
11809 static tree
11810 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
11811 {
11812 if (!validate_arg (dst, POINTER_TYPE)
11813 || !validate_arg (src, POINTER_TYPE)
11814 || !validate_arg (len, INTEGER_TYPE))
11815 return NULL_TREE;
11816 else
11817 {
11818 const char *p = c_getstr (src);
11819
11820 /* If the requested length is zero, or the src parameter string
11821 length is zero, return the dst parameter. */
11822 if (integer_zerop (len) || (p && *p == '\0'))
11823 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11824
11825 /* If the requested len is greater than or equal to the string
11826 length, call strcat. */
11827 if (TREE_CODE (len) == INTEGER_CST && p
11828 && compare_tree_int (len, strlen (p)) >= 0)
11829 {
11830 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
11831
11832 /* If the replacement _DECL isn't initialized, don't do the
11833 transformation. */
11834 if (!fn)
11835 return NULL_TREE;
11836
11837 return build_call_expr_loc (loc, fn, 2, dst, src);
11838 }
11839 return NULL_TREE;
11840 }
11841 }
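/* E.g. strncat (dst, "ab", 5) becomes strcat (dst, "ab"): a bound of 5
   can never truncate a two-character constant source.  */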
11842
11843 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11844 to the call.
11845
11846 Return NULL_TREE if no simplification was possible, otherwise return the
11847 simplified form of the call as a tree.
11848
11849 The simplified form may be a constant or other expression which
11850 computes the same value, but in a more efficient manner (including
11851 calls to other builtin functions).
11852
11853 The call may contain arguments which need to be evaluated, but
11854 which are not useful to determine the result of the call. In
11855 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11856 COMPOUND_EXPR will be an argument which must be evaluated.
11857 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11858 COMPOUND_EXPR in the chain will contain the tree for the simplified
11859 form of the builtin function call. */
11860
11861 static tree
11862 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11863 {
11864 if (!validate_arg (s1, POINTER_TYPE)
11865 || !validate_arg (s2, POINTER_TYPE))
11866 return NULL_TREE;
11867 else
11868 {
11869 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11870
11871 /* If both arguments are constants, evaluate at compile-time. */
11872 if (p1 && p2)
11873 {
11874 const size_t r = strspn (p1, p2);
11875 return build_int_cst (size_type_node, r);
11876 }
11877
11878       /* If either argument is "", the result is zero.  */
11879 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11880 /* Evaluate and ignore both arguments in case either one has
11881 side-effects. */
11882 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11883 s1, s2);
11884 return NULL_TREE;
11885 }
11886 }
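/* Examples of the strspn folds above:

     strspn ("aab", "ab") -> 3
     strspn (s1, "")      -> 0, still evaluating S1 and S2  */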
11887
11888 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11889 to the call.
11890
11891 Return NULL_TREE if no simplification was possible, otherwise return the
11892 simplified form of the call as a tree.
11893
11894 The simplified form may be a constant or other expression which
11895 computes the same value, but in a more efficient manner (including
11896 calls to other builtin functions).
11897
11898 The call may contain arguments which need to be evaluated, but
11899 which are not useful to determine the result of the call. In
11900 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11901 COMPOUND_EXPR will be an argument which must be evaluated.
11902 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11903 COMPOUND_EXPR in the chain will contain the tree for the simplified
11904 form of the builtin function call. */
11905
11906 static tree
11907 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11908 {
11909 if (!validate_arg (s1, POINTER_TYPE)
11910 || !validate_arg (s2, POINTER_TYPE))
11911 return NULL_TREE;
11912 else
11913 {
11914 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11915
11916 /* If both arguments are constants, evaluate at compile-time. */
11917 if (p1 && p2)
11918 {
11919 const size_t r = strcspn (p1, p2);
11920 return build_int_cst (size_type_node, r);
11921 }
11922
11923       /* If the first argument is "", the result is zero.  */
11924 if (p1 && *p1 == '\0')
11925 {
11926 /* Evaluate and ignore argument s2 in case it has
11927 side-effects. */
11928 return omit_one_operand_loc (loc, size_type_node,
11929 size_zero_node, s2);
11930 }
11931
11932 /* If the second argument is "", return __builtin_strlen(s1). */
11933 if (p2 && *p2 == '\0')
11934 {
11935 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11936
11937 /* If the replacement _DECL isn't initialized, don't do the
11938 transformation. */
11939 if (!fn)
11940 return NULL_TREE;
11941
11942 return build_call_expr_loc (loc, fn, 1, s1);
11943 }
11944 return NULL_TREE;
11945 }
11946 }
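/* Examples of the strcspn folds above:

     strcspn ("abc", "c") -> 2
     strcspn (s1, "")     -> strlen (s1)  */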
11947
11948 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11949 to the call. IGNORE is true if the value returned
11950    by the builtin will be ignored.  UNLOCKED is true if this is
11951    actually a call to fputs_unlocked.  If LEN is non-NULL, it represents
11952 the known length of the string. Return NULL_TREE if no simplification
11953 was possible. */
11954
11955 tree
11956 fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
11957 bool ignore, bool unlocked, tree len)
11958 {
11959 /* If we're using an unlocked function, assume the other unlocked
11960 functions exist explicitly. */
11961 tree const fn_fputc = (unlocked
11962 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
11963 : builtin_decl_implicit (BUILT_IN_FPUTC));
11964 tree const fn_fwrite = (unlocked
11965 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
11966 : builtin_decl_implicit (BUILT_IN_FWRITE));
11967
11968 /* If the return value is used, don't do the transformation. */
11969 if (!ignore)
11970 return NULL_TREE;
11971
11972 /* Verify the arguments in the original call. */
11973 if (!validate_arg (arg0, POINTER_TYPE)
11974 || !validate_arg (arg1, POINTER_TYPE))
11975 return NULL_TREE;
11976
11977 if (! len)
11978 len = c_strlen (arg0, 0);
11979
11980 /* Get the length of the string passed to fputs. If the length
11981 can't be determined, punt. */
11982 if (!len
11983 || TREE_CODE (len) != INTEGER_CST)
11984 return NULL_TREE;
11985
11986 switch (compare_tree_int (len, 1))
11987 {
11988     case -1: /* length is 0, delete the call entirely.  */
11989 return omit_one_operand_loc (loc, integer_type_node,
11990 				   integer_zero_node, arg1);
11991
11992 case 0: /* length is 1, call fputc. */
11993 {
11994 const char *p = c_getstr (arg0);
11995
11996 if (p != NULL)
11997 {
11998 if (fn_fputc)
11999 return build_call_expr_loc (loc, fn_fputc, 2,
12000 build_int_cst
12001 (integer_type_node, p[0]), arg1);
12002 else
12003 return NULL_TREE;
12004 }
12005 }
12006 /* FALLTHROUGH */
12007 case 1: /* length is greater than 1, call fwrite. */
12008 {
12009 /* If optimizing for size keep fputs. */
12010 if (optimize_function_for_size_p (cfun))
12011 return NULL_TREE;
12012 /* New argument list transforming fputs(string, stream) to
12013 fwrite(string, 1, len, stream). */
12014 if (fn_fwrite)
12015 return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
12016 size_one_node, len, arg1);
12017 else
12018 return NULL_TREE;
12019 }
12020 default:
12021 gcc_unreachable ();
12022 }
12023 return NULL_TREE;
12024 }
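/* Examples of the fputs folds above (result unused):

     fputs ("", f)    -> evaluate F, result 0
     fputs ("x", f)   -> fputc ('x', f)
     fputs ("abc", f) -> fwrite ("abc", 1, 3, f), unless optimizing
			 for size  */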
12025
12026 /* Fold the next_arg or va_start call EXP.  Returns true if an error was
12027    produced, false otherwise.  This is done so that we don't output the
12028    error or warning twice or three times.  */
12029
12030 bool
12031 fold_builtin_next_arg (tree exp, bool va_start_p)
12032 {
12033 tree fntype = TREE_TYPE (current_function_decl);
12034 int nargs = call_expr_nargs (exp);
12035 tree arg;
12036   /* There is a good chance the current input_location points inside the
12037 definition of the va_start macro (perhaps on the token for
12038 builtin) in a system header, so warnings will not be emitted.
12039 Use the location in real source code. */
12040 source_location current_location =
12041 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
12042 NULL);
12043
12044 if (!stdarg_p (fntype))
12045 {
12046 error ("%<va_start%> used in function with fixed args");
12047 return true;
12048 }
12049
12050 if (va_start_p)
12051 {
12052       if (nargs != 2)
12053 {
12054 error ("wrong number of arguments to function %<va_start%>");
12055 return true;
12056 }
12057 arg = CALL_EXPR_ARG (exp, 1);
12058 }
12059 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
12060      once we have checked the arguments and, if needed, issued a warning.  */
12061 else
12062 {
12063 if (nargs == 0)
12064 {
12065 /* Evidently an out of date version of <stdarg.h>; can't validate
12066 va_start's second argument, but can still work as intended. */
12067 warning_at (current_location,
12068 OPT_Wvarargs,
12069 "%<__builtin_next_arg%> called without an argument");
12070 return true;
12071 }
12072 else if (nargs > 1)
12073 {
12074 error ("wrong number of arguments to function %<__builtin_next_arg%>");
12075 return true;
12076 }
12077 arg = CALL_EXPR_ARG (exp, 0);
12078 }
12079
12080 if (TREE_CODE (arg) == SSA_NAME)
12081 arg = SSA_NAME_VAR (arg);
12082
12083 /* We destructively modify the call to be __builtin_va_start (ap, 0)
12084 or __builtin_next_arg (0) the first time we see it, after checking
12085 the arguments and if needed issuing a warning. */
12086 if (!integer_zerop (arg))
12087 {
12088 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
12089
12090 /* Strip off all nops for the sake of the comparison. This
12091 is not quite the same as STRIP_NOPS. It does more.
12092 We must also strip off INDIRECT_EXPR for C++ reference
12093 parameters. */
12094 while (CONVERT_EXPR_P (arg)
12095 || TREE_CODE (arg) == INDIRECT_REF)
12096 arg = TREE_OPERAND (arg, 0);
12097 if (arg != last_parm)
12098 {
12099 	  /* FIXME: Sometimes the tree optimizers hand us something that is
12100 	     not the last argument even though the user did use the last
12101 	     argument.  We just warn and carry on as if it were the last
12102 	     argument, so we may generate wrong code because of
12103 	     it.  */
12104 warning_at (current_location,
12105 OPT_Wvarargs,
12106 "second parameter of %<va_start%> not last named argument");
12107 }
12108
12109 /* Undefined by C99 7.15.1.4p4 (va_start):
12110 "If the parameter parmN is declared with the register storage
12111 class, with a function or array type, or with a type that is
12112 not compatible with the type that results after application of
12113 the default argument promotions, the behavior is undefined."
12114 */
12115 else if (DECL_REGISTER (arg))
12116 {
12117 warning_at (current_location,
12118 OPT_Wvarargs,
12119 "undefined behaviour when second parameter of "
12120 "%<va_start%> is declared with %<register%> storage");
12121 }
12122
12123 /* We want to verify the second parameter just once before the tree
12124 optimizers are run and then avoid keeping it in the tree,
12125 as otherwise we could warn even for correct code like:
12126 void foo (int i, ...)
12127 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
12128 if (va_start_p)
12129 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
12130 else
12131 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
12132 }
12133 return false;
12134 }
12135
12136
12137 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
12138 ORIG may be null if this is a 2-argument call. We don't attempt to
12139 simplify calls with more than 3 arguments.
12140
12141 Return NULL_TREE if no simplification was possible, otherwise return the
12142 simplified form of the call as a tree. If IGNORED is true, it means that
12143 the caller does not use the returned value of the function. */
12144
12145 static tree
12146 fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
12147 tree orig, int ignored)
12148 {
12149 tree call, retval;
12150 const char *fmt_str = NULL;
12151
12152 /* Verify the required arguments in the original call. We deal with two
12153 types of sprintf() calls: 'sprintf (str, fmt)' and
12154 'sprintf (dest, "%s", orig)'. */
12155 if (!validate_arg (dest, POINTER_TYPE)
12156 || !validate_arg (fmt, POINTER_TYPE))
12157 return NULL_TREE;
12158 if (orig && !validate_arg (orig, POINTER_TYPE))
12159 return NULL_TREE;
12160
12161 /* Check whether the format is a literal string constant. */
12162 fmt_str = c_getstr (fmt);
12163 if (fmt_str == NULL)
12164 return NULL_TREE;
12165
12166 call = NULL_TREE;
12167 retval = NULL_TREE;
12168
12169 if (!init_target_chars ())
12170 return NULL_TREE;
12171
12172 /* If the format doesn't contain % args or %%, use strcpy. */
12173 if (strchr (fmt_str, target_percent) == NULL)
12174 {
12175 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12176
12177 if (!fn)
12178 return NULL_TREE;
12179
12180 /* Don't optimize sprintf (buf, "abc", ptr++). */
12181 if (orig)
12182 return NULL_TREE;
12183
12184 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
12185 'format' is known to contain no % formats. */
12186 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
12187 if (!ignored)
12188 retval = build_int_cst (integer_type_node, strlen (fmt_str));
12189 }
12190
12191 /* If the format is "%s", use strcpy if the result isn't used. */
12192 else if (strcmp (fmt_str, target_percent_s) == 0)
12193 {
12194 tree fn;
12195 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12196
12197 if (!fn)
12198 return NULL_TREE;
12199
12200 /* Don't crash on sprintf (str1, "%s"). */
12201 if (!orig)
12202 return NULL_TREE;
12203
12204 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
12205 if (!ignored)
12206 {
12207 retval = c_strlen (orig, 1);
12208 if (!retval || TREE_CODE (retval) != INTEGER_CST)
12209 return NULL_TREE;
12210 }
12211 call = build_call_expr_loc (loc, fn, 2, dest, orig);
12212 }
12213
12214 if (call && retval)
12215 {
12216 retval = fold_convert_loc
12217 (loc, TREE_TYPE (TREE_TYPE (builtin_decl_implicit (BUILT_IN_SPRINTF))),
12218 retval);
12219 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
12220 }
12221 else
12222 return call;
12223 }
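
/* Illustrative example (editorial sketch, not part of GCC) of the folding
   above at the source level; "buf" and "name" are hypothetical variables.

     #include <stdio.h>
     #include <string.h>

     int main (void)
     {
       char buf[16];
       const char *name = "world";
       sprintf (buf, "hello");     // folded to strcpy (buf, "hello");
                                   // the value, if used, is the constant 5
       sprintf (buf, "%s", name);  // folded to strcpy (buf, name) because
                                   // the return value is unused here
       printf ("%s\n", buf);
       return 0;
     }
*/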
12224
12225 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
12226 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
12227 attempt to simplify calls with more than 4 arguments.
12228
12229 Return NULL_TREE if no simplification was possible, otherwise return the
12230 simplified form of the call as a tree. If IGNORED is true, it means that
12231 the caller does not use the returned value of the function. */
12232
12233 static tree
12234 fold_builtin_snprintf (location_t loc, tree dest, tree destsize, tree fmt,
12235 tree orig, int ignored)
12236 {
12237 tree call, retval;
12238 const char *fmt_str = NULL;
12239 unsigned HOST_WIDE_INT destlen;
12240
12241 /* Verify the required arguments in the original call. We deal with two
12242 types of snprintf() calls: 'snprintf (str, cst, fmt)' and
12243 'snprintf (dest, cst, "%s", orig)'. */
12244 if (!validate_arg (dest, POINTER_TYPE)
12245 || !validate_arg (destsize, INTEGER_TYPE)
12246 || !validate_arg (fmt, POINTER_TYPE))
12247 return NULL_TREE;
12248 if (orig && !validate_arg (orig, POINTER_TYPE))
12249 return NULL_TREE;
12250
12251 if (!tree_fits_uhwi_p (destsize))
12252 return NULL_TREE;
12253
12254 /* Check whether the format is a literal string constant. */
12255 fmt_str = c_getstr (fmt);
12256 if (fmt_str == NULL)
12257 return NULL_TREE;
12258
12259 call = NULL_TREE;
12260 retval = NULL_TREE;
12261
12262 if (!init_target_chars ())
12263 return NULL_TREE;
12264
12265 destlen = tree_to_uhwi (destsize);
12266
12267 /* If the format doesn't contain % args or %%, use strcpy. */
12268 if (strchr (fmt_str, target_percent) == NULL)
12269 {
12270 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12271 size_t len = strlen (fmt_str);
12272
12273 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
12274 if (orig)
12275 return NULL_TREE;
12276
12277 /* We could expand this as
12278 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
12279 or to
12280 memcpy (str, fmt_with_nul_at_cstm1, cst);
12281 but in the former case that might increase code size
12282 and in the latter case grow .rodata section too much.
12283 So punt for now. */
12284 if (len >= destlen)
12285 return NULL_TREE;
12286
12287 if (!fn)
12288 return NULL_TREE;
12289
12290 /* Convert snprintf (str, cst, fmt) into strcpy (str, fmt) when
12291 'format' is known to contain no % formats and
12292 strlen (fmt) < cst. */
12293 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
12294
12295 if (!ignored)
12296 retval = build_int_cst (integer_type_node, strlen (fmt_str));
12297 }
12298
12299 /* If the format is "%s", use strcpy if the result isn't used. */
12300 else if (strcmp (fmt_str, target_percent_s) == 0)
12301 {
12302 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12303 unsigned HOST_WIDE_INT origlen;
12304
12305 /* Don't crash on snprintf (str1, cst, "%s"). */
12306 if (!orig)
12307 return NULL_TREE;
12308
12309 retval = c_strlen (orig, 1);
12310 if (!retval || !tree_fits_uhwi_p (retval))
12311 return NULL_TREE;
12312
12313 origlen = tree_to_uhwi (retval);
12314 /* We could expand this as
12315 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
12316 or to
12317 memcpy (str1, str2_with_nul_at_cstm1, cst);
12318 but in the former case that might increase code size
12319 and in the latter case grow .rodata section too much.
12320 So punt for now. */
12321 if (origlen >= destlen)
12322 return NULL_TREE;
12323
12324 /* Convert snprintf (str1, cst, "%s", str2) into
12325 strcpy (str1, str2) if strlen (str2) < cst. */
12326 if (!fn)
12327 return NULL_TREE;
12328
12329 call = build_call_expr_loc (loc, fn, 2, dest, orig);
12330
12331 if (ignored)
12332 retval = NULL_TREE;
12333 }
12334
12335 if (call && retval)
12336 {
12337 tree fn = builtin_decl_explicit (BUILT_IN_SNPRINTF);
12338 retval = fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fn)), retval);
12339 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
12340 }
12341 else
12342 return call;
12343 }
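
/* Illustrative example (editorial sketch, not part of GCC): the snprintf
   folding above fires only when the constant destination size is strictly
   greater than the copied length, so no truncation can happen.  "buf" is
   a hypothetical buffer.

     #include <stdio.h>

     int main (void)
     {
       char buf[8];
       snprintf (buf, sizeof buf, "abc");  // strlen ("abc") == 3 < 8:
                                           // folded to strcpy (buf, "abc")
       snprintf (buf, 2, "abc");           // 3 >= 2: not folded; snprintf
                                           // must truncate at run time
       printf ("%s\n", buf);
       return 0;
     }
*/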
12344
12345 /* Expand a call EXP to __builtin_object_size. */
12346
12347 rtx
12348 expand_builtin_object_size (tree exp)
12349 {
12350 tree ost;
12351 int object_size_type;
12352 tree fndecl = get_callee_fndecl (exp);
12353
12354 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
12355 {
12356 error ("%Kfirst argument of %D must be a pointer, second integer constant",
12357 exp, fndecl);
12358 expand_builtin_trap ();
12359 return const0_rtx;
12360 }
12361
12362 ost = CALL_EXPR_ARG (exp, 1);
12363 STRIP_NOPS (ost);
12364
12365 if (TREE_CODE (ost) != INTEGER_CST
12366 || tree_int_cst_sgn (ost) < 0
12367 || compare_tree_int (ost, 3) > 0)
12368 {
12369 error ("%Klast argument of %D is not integer constant between 0 and 3",
12370 exp, fndecl);
12371 expand_builtin_trap ();
12372 return const0_rtx;
12373 }
12374
12375 object_size_type = tree_to_shwi (ost);
12376
12377 return object_size_type < 2 ? constm1_rtx : const0_rtx;
12378 }
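
/* Illustrative example (editorial sketch, not part of GCC): most
   __builtin_object_size calls are folded earlier; what reaches the
   expander above gets the documented "unknown" answers, (size_t) -1 for
   types 0 and 1 and 0 for types 2 and 3.  "buf" is hypothetical.

     #include <stdio.h>

     int main (void)
     {
       char buf[32];
       printf ("%zu\n", __builtin_object_size (buf, 0));  // 32 (maximum)
       printf ("%zu\n", __builtin_object_size (buf, 2));  // 32 (minimum)
       return 0;
     }
*/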
12379
12380 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12381 FCODE is the BUILT_IN_* to use.
12382 Return NULL_RTX if we failed; the caller should emit a normal call,
12383 otherwise try to get the result in TARGET, if convenient (and in
12384 mode MODE if that's convenient). */
12385
12386 static rtx
12387 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
12388 enum built_in_function fcode)
12389 {
12390 tree dest, src, len, size;
12391
12392 if (!validate_arglist (exp,
12393 POINTER_TYPE,
12394 fcode == BUILT_IN_MEMSET_CHK
12395 ? INTEGER_TYPE : POINTER_TYPE,
12396 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
12397 return NULL_RTX;
12398
12399 dest = CALL_EXPR_ARG (exp, 0);
12400 src = CALL_EXPR_ARG (exp, 1);
12401 len = CALL_EXPR_ARG (exp, 2);
12402 size = CALL_EXPR_ARG (exp, 3);
12403
12404 if (! tree_fits_uhwi_p (size))
12405 return NULL_RTX;
12406
12407 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
12408 {
12409 tree fn;
12410
12411 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
12412 {
12413 warning_at (tree_nonartificial_location (exp),
12414 0, "%Kcall to %D will always overflow destination buffer",
12415 exp, get_callee_fndecl (exp));
12416 return NULL_RTX;
12417 }
12418
12419 fn = NULL_TREE;
12420 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12421 mem{cpy,pcpy,move,set} is available. */
12422 switch (fcode)
12423 {
12424 case BUILT_IN_MEMCPY_CHK:
12425 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
12426 break;
12427 case BUILT_IN_MEMPCPY_CHK:
12428 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
12429 break;
12430 case BUILT_IN_MEMMOVE_CHK:
12431 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
12432 break;
12433 case BUILT_IN_MEMSET_CHK:
12434 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
12435 break;
12436 default:
12437 break;
12438 }
12439
12440 if (! fn)
12441 return NULL_RTX;
12442
12443 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
12444 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
12445 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12446 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12447 }
12448 else if (fcode == BUILT_IN_MEMSET_CHK)
12449 return NULL_RTX;
12450 else
12451 {
12452 unsigned int dest_align = get_pointer_alignment (dest);
12453
12454 /* If DEST is not a pointer type, call the normal function. */
12455 if (dest_align == 0)
12456 return NULL_RTX;
12457
12458 /* If SRC and DEST are the same (and not volatile), do nothing. */
12459 if (operand_equal_p (src, dest, 0))
12460 {
12461 tree expr;
12462
12463 if (fcode != BUILT_IN_MEMPCPY_CHK)
12464 {
12465 /* Evaluate and ignore LEN in case it has side-effects. */
12466 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
12467 return expand_expr (dest, target, mode, EXPAND_NORMAL);
12468 }
12469
12470 expr = fold_build_pointer_plus (dest, len);
12471 return expand_expr (expr, target, mode, EXPAND_NORMAL);
12472 }
12473
12474 /* __memmove_chk special case. */
12475 if (fcode == BUILT_IN_MEMMOVE_CHK)
12476 {
12477 unsigned int src_align = get_pointer_alignment (src);
12478
12479 if (src_align == 0)
12480 return NULL_RTX;
12481
12482 /* If src is categorized for a readonly section we can use
12483 normal __memcpy_chk. */
12484 if (readonly_data_expr (src))
12485 {
12486 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12487 if (!fn)
12488 return NULL_RTX;
12489 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
12490 dest, src, len, size);
12491 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
12492 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12493 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12494 }
12495 }
12496 return NULL_RTX;
12497 }
12498 }
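
/* Illustrative example (editorial sketch, not part of GCC) of the
   expansion above; "dst" and "src" are hypothetical.  When the constant
   length fits in the known object size, the checking builtin is expanded
   as the plain library function; a provable overflow is diagnosed and the
   checking call kept.

     void demo (char *src)
     {
       char dst[8];
       __builtin___memcpy_chk (dst, src, 4,
                               __builtin_object_size (dst, 0));
       // expanded as memcpy (dst, src, 4): 4 <= 8
       // __builtin___memcpy_chk (dst, src, 16, 8) would instead warn
       // that the call will always overflow the destination buffer
     }
*/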
12499
12500 /* Emit warning if a buffer overflow is detected at compile time. */
12501
12502 static void
12503 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
12504 {
12505 int is_strlen = 0;
12506 tree len, size;
12507 location_t loc = tree_nonartificial_location (exp);
12508
12509 switch (fcode)
12510 {
12511 case BUILT_IN_STRCPY_CHK:
12512 case BUILT_IN_STPCPY_CHK:
12513 /* For __strcat_chk the warning will be emitted only if overflowing
12514 by at least strlen (dest) + 1 bytes. */
12515 case BUILT_IN_STRCAT_CHK:
12516 len = CALL_EXPR_ARG (exp, 1);
12517 size = CALL_EXPR_ARG (exp, 2);
12518 is_strlen = 1;
12519 break;
12520 case BUILT_IN_STRNCAT_CHK:
12521 case BUILT_IN_STRNCPY_CHK:
12522 case BUILT_IN_STPNCPY_CHK:
12523 len = CALL_EXPR_ARG (exp, 2);
12524 size = CALL_EXPR_ARG (exp, 3);
12525 break;
12526 case BUILT_IN_SNPRINTF_CHK:
12527 case BUILT_IN_VSNPRINTF_CHK:
12528 len = CALL_EXPR_ARG (exp, 1);
12529 size = CALL_EXPR_ARG (exp, 3);
12530 break;
12531 default:
12532 gcc_unreachable ();
12533 }
12534
12535 if (!len || !size)
12536 return;
12537
12538 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
12539 return;
12540
12541 if (is_strlen)
12542 {
12543 len = c_strlen (len, 1);
12544 if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
12545 return;
12546 }
12547 else if (fcode == BUILT_IN_STRNCAT_CHK)
12548 {
12549 tree src = CALL_EXPR_ARG (exp, 1);
12550 if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
12551 return;
12552 src = c_strlen (src, 1);
12553 if (! src || ! tree_fits_uhwi_p (src))
12554 {
12555 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
12556 exp, get_callee_fndecl (exp));
12557 return;
12558 }
12559 else if (tree_int_cst_lt (src, size))
12560 return;
12561 }
12562 else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
12563 return;
12564
12565 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
12566 exp, get_callee_fndecl (exp));
12567 }
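
/* Illustrative example (editorial sketch, not part of GCC) of code that
   trips the diagnostic above; "dst" is hypothetical.

     void demo (void)
     {
       char dst[4];
       __builtin___strcpy_chk (dst, "too long", sizeof dst);
       // warning: call to __builtin___strcpy_chk will always overflow
       // destination buffer (strlen ("too long") == 8 >= 4)
     }
*/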
12568
12569 /* Emit warning if a buffer overflow is detected at compile time
12570 in __sprintf_chk/__vsprintf_chk calls. */
12571
12572 static void
12573 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
12574 {
12575 tree size, len, fmt;
12576 const char *fmt_str;
12577 int nargs = call_expr_nargs (exp);
12578
12579 /* Verify the required arguments in the original call. */
12580
12581 if (nargs < 4)
12582 return;
12583 size = CALL_EXPR_ARG (exp, 2);
12584 fmt = CALL_EXPR_ARG (exp, 3);
12585
12586 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
12587 return;
12588
12589 /* Check whether the format is a literal string constant. */
12590 fmt_str = c_getstr (fmt);
12591 if (fmt_str == NULL)
12592 return;
12593
12594 if (!init_target_chars ())
12595 return;
12596
12597 /* If the format doesn't contain % args or %%, we know its size. */
12598 if (strchr (fmt_str, target_percent) == 0)
12599 len = build_int_cstu (size_type_node, strlen (fmt_str));
12600 /* If the format is "%s" and the first variadic argument is a string
12601 literal, we know the size too. */
12602 else if (fcode == BUILT_IN_SPRINTF_CHK
12603 && strcmp (fmt_str, target_percent_s) == 0)
12604 {
12605 tree arg;
12606
12607 if (nargs < 5)
12608 return;
12609 arg = CALL_EXPR_ARG (exp, 4);
12610 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
12611 return;
12612
12613 len = c_strlen (arg, 1);
12614 if (!len || ! tree_fits_uhwi_p (len))
12615 return;
12616 }
12617 else
12618 return;
12619
12620 if (! tree_int_cst_lt (len, size))
12621 warning_at (tree_nonartificial_location (exp),
12622 0, "%Kcall to %D will always overflow destination buffer",
12623 exp, get_callee_fndecl (exp));
12624 }
12625
12626 /* Emit warning if a free is called with address of a variable. */
12627
12628 static void
12629 maybe_emit_free_warning (tree exp)
12630 {
12631 tree arg = CALL_EXPR_ARG (exp, 0);
12632
12633 STRIP_NOPS (arg);
12634 if (TREE_CODE (arg) != ADDR_EXPR)
12635 return;
12636
12637 arg = get_base_address (TREE_OPERAND (arg, 0));
12638 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
12639 return;
12640
12641 if (SSA_VAR_P (arg))
12642 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
12643 "%Kattempt to free a non-heap object %qD", exp, arg);
12644 else
12645 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
12646 "%Kattempt to free a non-heap object", exp);
12647 }
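
/* Illustrative example (editorial sketch, not part of GCC) of code that
   trips -Wfree-nonheap-object above; "buf" is hypothetical.

     #include <stdlib.h>

     void demo (void)
     {
       char buf[16];
       free (buf);   // warning: attempt to free a non-heap object 'buf'
     }
*/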
12648
12649 /* Fold a call to __builtin_object_size with arguments PTR and OST,
12650 if possible. */
12651
12652 tree
12653 fold_builtin_object_size (tree ptr, tree ost)
12654 {
12655 unsigned HOST_WIDE_INT bytes;
12656 int object_size_type;
12657
12658 if (!validate_arg (ptr, POINTER_TYPE)
12659 || !validate_arg (ost, INTEGER_TYPE))
12660 return NULL_TREE;
12661
12662 STRIP_NOPS (ost);
12663
12664 if (TREE_CODE (ost) != INTEGER_CST
12665 || tree_int_cst_sgn (ost) < 0
12666 || compare_tree_int (ost, 3) > 0)
12667 return NULL_TREE;
12668
12669 object_size_type = tree_to_shwi (ost);
12670
12671 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
12672 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
12673 and (size_t) 0 for types 2 and 3. */
12674 if (TREE_SIDE_EFFECTS (ptr))
12675 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
12676
12677 if (TREE_CODE (ptr) == ADDR_EXPR)
12678 {
12679 bytes = compute_builtin_object_size (ptr, object_size_type);
12680 if (double_int_fits_to_tree_p (size_type_node,
12681 double_int::from_uhwi (bytes)))
12682 return build_int_cstu (size_type_node, bytes);
12683 }
12684 else if (TREE_CODE (ptr) == SSA_NAME)
12685 {
12686 /* If object size is not known yet, delay folding until
12687 later. Maybe subsequent passes will help determining
12688 it. */
12689 bytes = compute_builtin_object_size (ptr, object_size_type);
12690 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
12691 && double_int_fits_to_tree_p (size_type_node,
12692 double_int::from_uhwi (bytes)))
12693 return build_int_cstu (size_type_node, bytes);
12694 }
12695
12696 return NULL_TREE;
12697 }
12698
12699 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12700 DEST, SRC, LEN, and SIZE are the arguments to the call.
12701 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
12702 code of the builtin. If MAXLEN is not NULL, it is maximum length
12703 passed as third argument. */
12704
12705 tree
12706 fold_builtin_memory_chk (location_t loc, tree fndecl,
12707 tree dest, tree src, tree len, tree size,
12708 tree maxlen, bool ignore,
12709 enum built_in_function fcode)
12710 {
12711 tree fn;
12712
12713 if (!validate_arg (dest, POINTER_TYPE)
12714 || !validate_arg (src,
12715 (fcode == BUILT_IN_MEMSET_CHK
12716 ? INTEGER_TYPE : POINTER_TYPE))
12717 || !validate_arg (len, INTEGER_TYPE)
12718 || !validate_arg (size, INTEGER_TYPE))
12719 return NULL_TREE;
12720
12721 /* If SRC and DEST are the same (and not volatile), return DEST
12722 (resp. DEST+LEN for __mempcpy_chk). */
12723 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
12724 {
12725 if (fcode != BUILT_IN_MEMPCPY_CHK)
12726 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12727 dest, len);
12728 else
12729 {
12730 tree temp = fold_build_pointer_plus_loc (loc, dest, len);
12731 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
12732 }
12733 }
12734
12735 if (! tree_fits_uhwi_p (size))
12736 return NULL_TREE;
12737
12738 if (! integer_all_onesp (size))
12739 {
12740 if (! tree_fits_uhwi_p (len))
12741 {
12742 /* If LEN is not constant, try MAXLEN too.
12743 For MAXLEN only allow optimizing into the non-checking function
12744 if SIZE is >= MAXLEN; never convert to __chk_fail (). */
12745 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
12746 {
12747 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
12748 {
12749 /* (void) __mempcpy_chk () can be optimized into
12750 (void) __memcpy_chk (). */
12751 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12752 if (!fn)
12753 return NULL_TREE;
12754
12755 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
12756 }
12757 return NULL_TREE;
12758 }
12759 }
12760 else
12761 maxlen = len;
12762
12763 if (tree_int_cst_lt (size, maxlen))
12764 return NULL_TREE;
12765 }
12766
12767 fn = NULL_TREE;
12768 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12769 mem{cpy,pcpy,move,set} is available. */
12770 switch (fcode)
12771 {
12772 case BUILT_IN_MEMCPY_CHK:
12773 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
12774 break;
12775 case BUILT_IN_MEMPCPY_CHK:
12776 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
12777 break;
12778 case BUILT_IN_MEMMOVE_CHK:
12779 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
12780 break;
12781 case BUILT_IN_MEMSET_CHK:
12782 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
12783 break;
12784 default:
12785 break;
12786 }
12787
12788 if (!fn)
12789 return NULL_TREE;
12790
12791 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12792 }
12793
12794 /* Fold a call to the __st[rp]cpy_chk builtin.
12795 DEST, SRC, and SIZE are the arguments to the call.
12796 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12797 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12798 strings passed as second argument. */
12799
12800 tree
12801 fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
12802 tree src, tree size,
12803 tree maxlen, bool ignore,
12804 enum built_in_function fcode)
12805 {
12806 tree len, fn;
12807
12808 if (!validate_arg (dest, POINTER_TYPE)
12809 || !validate_arg (src, POINTER_TYPE)
12810 || !validate_arg (size, INTEGER_TYPE))
12811 return NULL_TREE;
12812
12813 /* If SRC and DEST are the same (and not volatile), return DEST. */
12814 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12815 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
12816
12817 if (! tree_fits_uhwi_p (size))
12818 return NULL_TREE;
12819
12820 if (! integer_all_onesp (size))
12821 {
12822 len = c_strlen (src, 1);
12823 if (! len || ! tree_fits_uhwi_p (len))
12824 {
12825 /* If LEN is not constant, try MAXLEN too.
12826 For MAXLEN only allow optimizing into the non-checking function
12827 if SIZE is >= MAXLEN; never convert to __chk_fail (). */
12828 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
12829 {
12830 if (fcode == BUILT_IN_STPCPY_CHK)
12831 {
12832 if (! ignore)
12833 return NULL_TREE;
12834
12835 /* If return value of __stpcpy_chk is ignored,
12836 optimize into __strcpy_chk. */
12837 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
12838 if (!fn)
12839 return NULL_TREE;
12840
12841 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12842 }
12843
12844 if (! len || TREE_SIDE_EFFECTS (len))
12845 return NULL_TREE;
12846
12847 /* If c_strlen returned something, but not a constant,
12848 transform __strcpy_chk into __memcpy_chk. */
12849 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12850 if (!fn)
12851 return NULL_TREE;
12852
12853 len = fold_convert_loc (loc, size_type_node, len);
12854 len = size_binop_loc (loc, PLUS_EXPR, len,
12855 build_int_cst (size_type_node, 1));
12856 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12857 build_call_expr_loc (loc, fn, 4,
12858 dest, src, len, size));
12859 }
12860 }
12861 else
12862 maxlen = len;
12863
12864 if (! tree_int_cst_lt (maxlen, size))
12865 return NULL_TREE;
12866 }
12867
12868 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12869 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
12870 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
12871 if (!fn)
12872 return NULL_TREE;
12873
12874 return build_call_expr_loc (loc, fn, 2, dest, src);
12875 }
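
/* Illustrative example (editorial sketch, not part of GCC) of the two
   foldings above; "dst", "src" and "buf" are hypothetical.

     #include <string.h>

     void demo (char *dst, const char *src)
     {
       char buf[8];
       __builtin___strcpy_chk (buf, "hi", sizeof buf);
       // strlen ("hi") == 2 < 8: folded to strcpy (buf, "hi")
       __builtin___strcpy_chk (dst, src, (size_t) -1);
       // unknown object size: checking is pointless, folded to strcpy
     }
*/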
12876
12877 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
12878 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12879 length passed as third argument. IGNORE is true if return value can be
12880 ignored. FCODE is the BUILT_IN_* code of the builtin. */
12881
12882 tree
12883 fold_builtin_stxncpy_chk (location_t loc, tree dest, tree src,
12884 tree len, tree size, tree maxlen, bool ignore,
12885 enum built_in_function fcode)
12886 {
12887 tree fn;
12888
12889 if (!validate_arg (dest, POINTER_TYPE)
12890 || !validate_arg (src, POINTER_TYPE)
12891 || !validate_arg (len, INTEGER_TYPE)
12892 || !validate_arg (size, INTEGER_TYPE))
12893 return NULL_TREE;
12894
12895 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
12896 {
12897 /* If return value of __stpncpy_chk is ignored,
12898 optimize into __strncpy_chk. */
12899 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
12900 if (fn)
12901 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
12902 }
12903
12904 if (! tree_fits_uhwi_p (size))
12905 return NULL_TREE;
12906
12907 if (! integer_all_onesp (size))
12908 {
12909 if (! tree_fits_uhwi_p (len))
12910 {
12911 /* If LEN is not constant, try MAXLEN too.
12912 For MAXLEN only allow optimizing into the non-checking function
12913 if SIZE is >= MAXLEN; never convert to __chk_fail (). */
12914 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
12915 return NULL_TREE;
12916 }
12917 else
12918 maxlen = len;
12919
12920 if (tree_int_cst_lt (size, maxlen))
12921 return NULL_TREE;
12922 }
12923
12924 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
12925 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
12926 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
12927 if (!fn)
12928 return NULL_TREE;
12929
12930 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12931 }
12932
12933 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12934 are the arguments to the call. */
12935
12936 static tree
12937 fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
12938 tree src, tree size)
12939 {
12940 tree fn;
12941 const char *p;
12942
12943 if (!validate_arg (dest, POINTER_TYPE)
12944 || !validate_arg (src, POINTER_TYPE)
12945 || !validate_arg (size, INTEGER_TYPE))
12946 return NULL_TREE;
12947
12948 p = c_getstr (src);
12949 /* If the SRC parameter is "", return DEST. */
12950 if (p && *p == '\0')
12951 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12952
12953 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
12954 return NULL_TREE;
12955
12956 /* If __builtin_strcat_chk is used, assume strcat is available. */
12957 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
12958 if (!fn)
12959 return NULL_TREE;
12960
12961 return build_call_expr_loc (loc, fn, 2, dest, src);
12962 }
12963
12964 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12965 LEN, and SIZE. */
12966
12967 static tree
12968 fold_builtin_strncat_chk (location_t loc, tree fndecl,
12969 tree dest, tree src, tree len, tree size)
12970 {
12971 tree fn;
12972 const char *p;
12973
12974 if (!validate_arg (dest, POINTER_TYPE)
12975 || !validate_arg (src, POINTER_TYPE)
12976 || !validate_arg (len, INTEGER_TYPE)
12977 || !validate_arg (size, INTEGER_TYPE))
12978 return NULL_TREE;
12979
12980 p = c_getstr (src);
12981 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12982 if (p && *p == '\0')
12983 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12984 else if (integer_zerop (len))
12985 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12986
12987 if (! tree_fits_uhwi_p (size))
12988 return NULL_TREE;
12989
12990 if (! integer_all_onesp (size))
12991 {
12992 tree src_len = c_strlen (src, 1);
12993 if (src_len
12994 && tree_fits_uhwi_p (src_len)
12995 && tree_fits_uhwi_p (len)
12996 && ! tree_int_cst_lt (len, src_len))
12997 {
12998 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12999 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
13000 if (!fn)
13001 return NULL_TREE;
13002
13003 return build_call_expr_loc (loc, fn, 3, dest, src, size);
13004 }
13005 return NULL_TREE;
13006 }
13007
13008 /* If __builtin_strncat_chk is used, assume strncat is available. */
13009 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
13010 if (!fn)
13011 return NULL_TREE;
13012
13013 return build_call_expr_loc (loc, fn, 3, dest, src, len);
13014 }
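
/* Illustrative example (editorial sketch, not part of GCC): when the
   bound is at least the constant source length, the bound cannot matter
   and the call above is folded into the cheaper __strcat_chk.  "buf" is
   hypothetical.

     #include <string.h>

     void demo (void)
     {
       char buf[16] = "a";
       __builtin___strncat_chk (buf, "bc", 8, sizeof buf);
       // strlen ("bc") == 2 <= 8, so this becomes
       // __builtin___strcat_chk (buf, "bc", sizeof buf)
     }
*/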
13015
13016 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
13017 Return NULL_TREE if a normal call should be emitted rather than
13018 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
13019 or BUILT_IN_VSPRINTF_CHK. */
13020
13021 static tree
13022 fold_builtin_sprintf_chk_1 (location_t loc, int nargs, tree *args,
13023 enum built_in_function fcode)
13024 {
13025 tree dest, size, len, fn, fmt, flag;
13026 const char *fmt_str;
13027
13028 /* Verify the required arguments in the original call. */
13029 if (nargs < 4)
13030 return NULL_TREE;
13031 dest = args[0];
13032 if (!validate_arg (dest, POINTER_TYPE))
13033 return NULL_TREE;
13034 flag = args[1];
13035 if (!validate_arg (flag, INTEGER_TYPE))
13036 return NULL_TREE;
13037 size = args[2];
13038 if (!validate_arg (size, INTEGER_TYPE))
13039 return NULL_TREE;
13040 fmt = args[3];
13041 if (!validate_arg (fmt, POINTER_TYPE))
13042 return NULL_TREE;
13043
13044 if (! tree_fits_uhwi_p (size))
13045 return NULL_TREE;
13046
13047 len = NULL_TREE;
13048
13049 if (!init_target_chars ())
13050 return NULL_TREE;
13051
13052 /* Check whether the format is a literal string constant. */
13053 fmt_str = c_getstr (fmt);
13054 if (fmt_str != NULL)
13055 {
13056 /* If the format doesn't contain % args or %%, we know the size. */
13057 if (strchr (fmt_str, target_percent) == 0)
13058 {
13059 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
13060 len = build_int_cstu (size_type_node, strlen (fmt_str));
13061 }
13062 /* If the format is "%s" and the first variadic argument is a string
13063 literal, we know the size too. */
13064 else if (fcode == BUILT_IN_SPRINTF_CHK
13065 && strcmp (fmt_str, target_percent_s) == 0)
13066 {
13067 tree arg;
13068
13069 if (nargs == 5)
13070 {
13071 arg = args[4];
13072 if (validate_arg (arg, POINTER_TYPE))
13073 {
13074 len = c_strlen (arg, 1);
13075 if (! len || ! tree_fits_uhwi_p (len))
13076 len = NULL_TREE;
13077 }
13078 }
13079 }
13080 }
13081
13082 if (! integer_all_onesp (size))
13083 {
13084 if (! len || ! tree_int_cst_lt (len, size))
13085 return NULL_TREE;
13086 }
13087
13088 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
13089 or if format doesn't contain % chars or is "%s". */
13090 if (! integer_zerop (flag))
13091 {
13092 if (fmt_str == NULL)
13093 return NULL_TREE;
13094 if (strchr (fmt_str, target_percent) != NULL
13095 && strcmp (fmt_str, target_percent_s))
13096 return NULL_TREE;
13097 }
13098
13099 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
13100 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
13101 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
13102 if (!fn)
13103 return NULL_TREE;
13104
13105 return rewrite_call_expr_array (loc, nargs, args, 4, fn, 2, dest, fmt);
13106 }
13107
13108 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
13109 a normal call should be emitted rather than expanding the function
13110 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13111
13112 static tree
13113 fold_builtin_sprintf_chk (location_t loc, tree exp,
13114 enum built_in_function fcode)
13115 {
13116 return fold_builtin_sprintf_chk_1 (loc, call_expr_nargs (exp),
13117 CALL_EXPR_ARGP (exp), fcode);
13118 }
13119
13120 /* Fold a call EXP to __{,v}snprintf_chk having NARGS passed as ARGS. Return
13121 NULL_TREE if a normal call should be emitted rather than expanding
13122 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13123 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13124 passed as second argument. */
13125
13126 static tree
13127 fold_builtin_snprintf_chk_1 (location_t loc, int nargs, tree *args,
13128 tree maxlen, enum built_in_function fcode)
13129 {
13130 tree dest, size, len, fn, fmt, flag;
13131 const char *fmt_str;
13132
13133 /* Verify the required arguments in the original call. */
13134 if (nargs < 5)
13135 return NULL_TREE;
13136 dest = args[0];
13137 if (!validate_arg (dest, POINTER_TYPE))
13138 return NULL_TREE;
13139 len = args[1];
13140 if (!validate_arg (len, INTEGER_TYPE))
13141 return NULL_TREE;
13142 flag = args[2];
13143 if (!validate_arg (flag, INTEGER_TYPE))
13144 return NULL_TREE;
13145 size = args[3];
13146 if (!validate_arg (size, INTEGER_TYPE))
13147 return NULL_TREE;
13148 fmt = args[4];
13149 if (!validate_arg (fmt, POINTER_TYPE))
13150 return NULL_TREE;
13151
13152 if (! tree_fits_uhwi_p (size))
13153 return NULL_TREE;
13154
13155 if (! integer_all_onesp (size))
13156 {
13157 if (! tree_fits_uhwi_p (len))
13158 {
13159 /* If LEN is not constant, try MAXLEN too.
13160 For MAXLEN only allow optimizing into the non-checking function
13161 if SIZE is >= MAXLEN; never convert to __chk_fail (). */
13162 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
13163 return NULL_TREE;
13164 }
13165 else
13166 maxlen = len;
13167
13168 if (tree_int_cst_lt (size, maxlen))
13169 return NULL_TREE;
13170 }
13171
13172 if (!init_target_chars ())
13173 return NULL_TREE;
13174
13175 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13176 or if format doesn't contain % chars or is "%s". */
13177 if (! integer_zerop (flag))
13178 {
13179 fmt_str = c_getstr (fmt);
13180 if (fmt_str == NULL)
13181 return NULL_TREE;
13182 if (strchr (fmt_str, target_percent) != NULL
13183 && strcmp (fmt_str, target_percent_s))
13184 return NULL_TREE;
13185 }
13186
13187 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13188 available. */
13189 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
13190 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
13191 if (!fn)
13192 return NULL_TREE;
13193
13194 return rewrite_call_expr_array (loc, nargs, args, 5, fn, 3, dest, len, fmt);
13195 }
13196
13197 /* Fold a call EXP to __{,v}snprintf_chk. Return NULL_TREE if
13198 a normal call should be emitted rather than expanding the function
13199 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13200 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13201 passed as second argument. */
13202
13203 static tree
13204 fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
13205 enum built_in_function fcode)
13206 {
13207 return fold_builtin_snprintf_chk_1 (loc, call_expr_nargs (exp),
13208 CALL_EXPR_ARGP (exp), maxlen, fcode);
13209 }
13210
13211 /* Builtins with folding operations that operate on "..." arguments
13212 need special handling; we need to store the arguments in a convenient
13213 data structure before attempting any folding. Fortunately there are
13214 only a few builtins that fall into this category. FNDECL is the
13215 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
13216 result of the function call is ignored. */
13217
13218 static tree
13219 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
13220 bool ignore ATTRIBUTE_UNUSED)
13221 {
13222 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
13223 tree ret = NULL_TREE;
13224
13225 switch (fcode)
13226 {
13227 case BUILT_IN_SPRINTF_CHK:
13228 case BUILT_IN_VSPRINTF_CHK:
13229 ret = fold_builtin_sprintf_chk (loc, exp, fcode);
13230 break;
13231
13232 case BUILT_IN_SNPRINTF_CHK:
13233 case BUILT_IN_VSNPRINTF_CHK:
13234 ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
13235 break;
13236
13237 case BUILT_IN_FPCLASSIFY:
13238 ret = fold_builtin_fpclassify (loc, exp);
13239 break;
13240
13241 default:
13242 break;
13243 }
13244 if (ret)
13245 {
13246 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
13247 SET_EXPR_LOCATION (ret, loc);
13248 TREE_NO_WARNING (ret) = 1;
13249 return ret;
13250 }
13251 return NULL_TREE;
13252 }
13253
13254 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
13255 FMT and ARG are the arguments to the call; we don't fold cases with
13256 more than 2 arguments, and ARG may be null if this is a 1-argument case.
13257
13258 Return NULL_TREE if no simplification was possible, otherwise return the
13259 simplified form of the call as a tree. FCODE is the BUILT_IN_*
13260 code of the function to be simplified. */
13261
13262 static tree
13263 fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
13264 tree arg, bool ignore,
13265 enum built_in_function fcode)
13266 {
13267 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
13268 const char *fmt_str = NULL;
13269
13270 /* If the return value is used, don't do the transformation. */
13271 if (! ignore)
13272 return NULL_TREE;
13273
13274 /* Verify the required arguments in the original call. */
13275 if (!validate_arg (fmt, POINTER_TYPE))
13276 return NULL_TREE;
13277
13278 /* Check whether the format is a literal string constant. */
13279 fmt_str = c_getstr (fmt);
13280 if (fmt_str == NULL)
13281 return NULL_TREE;
13282
13283 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
13284 {
13285 /* If we're using an unlocked function, assume the other
13286 unlocked functions exist explicitly. */
13287 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
13288 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
13289 }
13290 else
13291 {
13292 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
13293 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
13294 }
13295
13296 if (!init_target_chars ())
13297 return NULL_TREE;
13298
13299 if (strcmp (fmt_str, target_percent_s) == 0
13300 || strchr (fmt_str, target_percent) == NULL)
13301 {
13302 const char *str;
13303
13304 if (strcmp (fmt_str, target_percent_s) == 0)
13305 {
13306 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
13307 return NULL_TREE;
13308
13309 if (!arg || !validate_arg (arg, POINTER_TYPE))
13310 return NULL_TREE;
13311
13312 str = c_getstr (arg);
13313 if (str == NULL)
13314 return NULL_TREE;
13315 }
13316 else
13317 {
13318 /* The format specifier doesn't contain any '%' characters. */
13319 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
13320 && arg)
13321 return NULL_TREE;
13322 str = fmt_str;
13323 }
13324
13325 /* If the string was "", printf does nothing. */
13326 if (str[0] == '\0')
13327 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
13328
13329 /* If the string has length of 1, call putchar. */
13330 if (str[1] == '\0')
13331 {
13332 /* Given printf ("c"), where "c" is any single character,
13333 convert "c"[0] to an int and pass that to the replacement
13334 function. */
13335 newarg = build_int_cst (integer_type_node, str[0]);
13336 if (fn_putchar)
13337 call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
13338 }
13339 else
13340 {
13341 /* If the string was "string\n", call puts("string"). */
13342 size_t len = strlen (str);
13343 if ((unsigned char)str[len - 1] == target_newline
13344 && (size_t) (int) len == len
13345 && (int) len > 0)
13346 {
13347 char *newstr;
13348 tree offset_node, string_cst;
13349
13350 /* Create a NUL-terminated string that's one char shorter
13351 than the original, stripping off the trailing '\n'. */
13352 newarg = build_string_literal (len, str);
13353 string_cst = string_constant (newarg, &offset_node);
13354 gcc_checking_assert (string_cst
13355 && (TREE_STRING_LENGTH (string_cst)
13356 == (int) len)
13357 && integer_zerop (offset_node)
13358 && (unsigned char)
13359 TREE_STRING_POINTER (string_cst)[len - 1]
13360 == target_newline);
13361 /* build_string_literal creates a new STRING_CST,
13362 modify it in place to avoid double copying. */
13363 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
13364 newstr[len - 1] = '\0';
13365 if (fn_puts)
13366 call = build_call_expr_loc (loc, fn_puts, 1, newarg);
13367 }
13368 else
13369 /* We'd like to arrange to call fputs (string, stdout) here,
13370 but we need stdout and don't have a way to get it yet. */
13371 return NULL_TREE;
13372 }
13373 }
13374
13375 /* The other optimizations can be done only on the non-va_list variants. */
13376 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
13377 return NULL_TREE;
13378
13379 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
13380 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
13381 {
13382 if (!arg || !validate_arg (arg, POINTER_TYPE))
13383 return NULL_TREE;
13384 if (fn_puts)
13385 call = build_call_expr_loc (loc, fn_puts, 1, arg);
13386 }
13387
13388 /* If the format specifier was "%c", call __builtin_putchar(arg). */
13389 else if (strcmp (fmt_str, target_percent_c) == 0)
13390 {
13391 if (!arg || !validate_arg (arg, INTEGER_TYPE))
13392 return NULL_TREE;
13393 if (fn_putchar)
13394 call = build_call_expr_loc (loc, fn_putchar, 1, arg);
13395 }
13396
13397 if (!call)
13398 return NULL_TREE;
13399
13400 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
13401 }
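
/* Illustrative examples (editorial sketch, not part of GCC) of the
   printf foldings above; all return values are unused, as required.

     #include <stdio.h>

     int main (void)
     {
       printf ("x");           // -> putchar ('x')
       printf ("hello\n");     // -> puts ("hello")
       printf ("%s\n", "hi");  // -> puts ("hi")
       printf ("%c", 'y');     // -> putchar ('y')
       printf ("");            // folded to the constant 0; emits nothing
       return 0;
     }
*/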
13402
13403 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
13404 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
13405 more than 3 arguments, and ARG may be null in the 2-argument case.
13406
13407 Return NULL_TREE if no simplification was possible, otherwise return the
13408 simplified form of the call as a tree. FCODE is the BUILT_IN_*
13409 code of the function to be simplified. */
13410
13411 static tree
13412 fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
13413 tree fmt, tree arg, bool ignore,
13414 enum built_in_function fcode)
13415 {
13416 tree fn_fputc, fn_fputs, call = NULL_TREE;
13417 const char *fmt_str = NULL;
13418
13419 /* If the return value is used, don't do the transformation. */
13420 if (! ignore)
13421 return NULL_TREE;
13422
13423 /* Verify the required arguments in the original call. */
13424 if (!validate_arg (fp, POINTER_TYPE))
13425 return NULL_TREE;
13426 if (!validate_arg (fmt, POINTER_TYPE))
13427 return NULL_TREE;
13428
13429 /* Check whether the format is a literal string constant. */
13430 fmt_str = c_getstr (fmt);
13431 if (fmt_str == NULL)
13432 return NULL_TREE;
13433
13434 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
13435 {
13436 /* If we're using an unlocked function, assume the other
13437 unlocked functions exist explicitly. */
13438 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
13439 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
13440 }
13441 else
13442 {
13443 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
13444 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
13445 }
13446
13447 if (!init_target_chars ())
13448 return NULL_TREE;
13449
13450 /* If the format doesn't contain % args or %%, use fputs. */
13451 if (strchr (fmt_str, target_percent) == NULL)
13452 {
13453 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
13454 && arg)
13455 return NULL_TREE;
13456
13457 /* If the format specifier was "", fprintf does nothing. */
13458 if (fmt_str[0] == '\0')
13459 {
13460 /* If FP has side-effects, just wait until gimplification is
13461 done. */
13462 if (TREE_SIDE_EFFECTS (fp))
13463 return NULL_TREE;
13464
13465 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
13466 }
13467
13468 /* When "string" doesn't contain %, replace all cases of
13469 fprintf (fp, string) with fputs (string, fp). The fputs
13470 builtin will take care of special cases like length == 1. */
13471 if (fn_fputs)
13472 call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
13473 }
13474
13475 /* The other optimizations can be done only on the non-va_list variants. */
13476 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
13477 return NULL_TREE;
13478
13479 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
13480 else if (strcmp (fmt_str, target_percent_s) == 0)
13481 {
13482 if (!arg || !validate_arg (arg, POINTER_TYPE))
13483 return NULL_TREE;
13484 if (fn_fputs)
13485 call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
13486 }
13487
13488 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
13489 else if (strcmp (fmt_str, target_percent_c) == 0)
13490 {
13491 if (!arg || !validate_arg (arg, INTEGER_TYPE))
13492 return NULL_TREE;
13493 if (fn_fputc)
13494 call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
13495 }
13496
13497 if (!call)
13498 return NULL_TREE;
13499 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
13500 }
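
/* Illustrative examples (editorial sketch, not part of GCC) of the
   fprintf foldings above; "fp" is a hypothetical stream.

     #include <stdio.h>

     void demo (FILE *fp)
     {
       fprintf (fp, "hello");     // -> fputs ("hello", fp)
       fprintf (fp, "%s", "hi");  // -> fputs ("hi", fp)
       fprintf (fp, "%c", 'y');   // -> fputc ('y', fp)
       fprintf (fp, "");          // folded to 0; nothing is printed
     }
*/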
13501
13502 /* Initialize format string characters in the target charset. */
13503
13504 static bool
13505 init_target_chars (void)
13506 {
13507 static bool init;
13508 if (!init)
13509 {
13510 target_newline = lang_hooks.to_target_charset ('\n');
13511 target_percent = lang_hooks.to_target_charset ('%');
13512 target_c = lang_hooks.to_target_charset ('c');
13513 target_s = lang_hooks.to_target_charset ('s');
13514 if (target_newline == 0 || target_percent == 0 || target_c == 0
13515 || target_s == 0)
13516 return false;
13517
13518 target_percent_c[0] = target_percent;
13519 target_percent_c[1] = target_c;
13520 target_percent_c[2] = '\0';
13521
13522 target_percent_s[0] = target_percent;
13523 target_percent_s[1] = target_s;
13524 target_percent_s[2] = '\0';
13525
13526 target_percent_s_newline[0] = target_percent;
13527 target_percent_s_newline[1] = target_s;
13528 target_percent_s_newline[2] = target_newline;
13529 target_percent_s_newline[3] = '\0';
13530
13531 init = true;
13532 }
13533 return true;
13534 }
13535
13536 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
13537 and no overflow/underflow occurred. INEXACT is true if M was not
13538 exactly calculated. TYPE is the tree type for the result. This
13539 function assumes that you cleared the MPFR flags and then
13540 calculated M to see if anything subsequently set a flag prior to
13541 entering this function. Return NULL_TREE if any checks fail. */
13542
13543 static tree
13544 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
13545 {
13546 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13547 overflow/underflow occurred. If -frounding-math, proceed iff the
13548 result of calling FUNC was exact. */
13549 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
13550 && (!flag_rounding_math || !inexact))
13551 {
13552 REAL_VALUE_TYPE rr;
13553
13554 real_from_mpfr (&rr, m, type, GMP_RNDN);
13555 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
13556 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13557 but the mpfr_t is not, then we underflowed in the
13558 conversion. */
13559 if (real_isfinite (&rr)
13560 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
13561 {
13562 REAL_VALUE_TYPE rmode;
13563
13564 real_convert (&rmode, TYPE_MODE (type), &rr);
13565 /* Proceed iff the specified mode can hold the value. */
13566 if (real_identical (&rmode, &rr))
13567 return build_real (type, rmode);
13568 }
13569 }
13570 return NULL_TREE;
13571 }
13572
13573 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
13574 number and no overflow/underflow occurred. INEXACT is true if M
13575 was not exactly calculated. TYPE is the tree type for the result.
13576 This function assumes that you cleared the MPFR flags and then
13577 calculated M to see if anything subsequently set a flag prior to
13578 entering this function. Return NULL_TREE if any checks fail;
13579 if FORCE_CONVERT is true, bypass the checks. */
13580
13581 static tree
13582 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
13583 {
13584 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13585 overflow/underflow occurred. If -frounding-math, proceed iff the
13586 result of calling FUNC was exact. */
13587 if (force_convert
13588 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
13589 && !mpfr_overflow_p () && !mpfr_underflow_p ()
13590 && (!flag_rounding_math || !inexact)))
13591 {
13592 REAL_VALUE_TYPE re, im;
13593
13594 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
13595 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
13596 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
13597 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13598 but the mpfr_t is not, then we underflowed in the
13599 conversion. */
13600 if (force_convert
13601 || (real_isfinite (&re) && real_isfinite (&im)
13602 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
13603 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
13604 {
13605 REAL_VALUE_TYPE re_mode, im_mode;
13606
13607 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
13608 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
13609 /* Proceed iff the specified mode can hold the value. */
13610 if (force_convert
13611 || (real_identical (&re_mode, &re)
13612 && real_identical (&im_mode, &im)))
13613 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
13614 build_real (TREE_TYPE (type), im_mode));
13615 }
13616 }
13617 return NULL_TREE;
13618 }
13619
13620 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
13621 FUNC on it and return the resulting value as a tree with type TYPE.
13622 If MIN and/or MAX are not NULL, then the supplied ARG must be
13623 within those bounds. If INCLUSIVE is true, then MIN/MAX are
13624 acceptable values, otherwise they are not. The mpfr precision is
13625 set to the precision of TYPE. We assume that function FUNC returns
13626 zero if the result could be calculated exactly within the requested
13627 precision. */
13628
13629 static tree
13630 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
13631 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
13632 bool inclusive)
13633 {
13634 tree result = NULL_TREE;
13635
13636 STRIP_NOPS (arg);
13637
13638 /* To proceed, MPFR must exactly represent the target floating point
13639 format, which only happens when the target base equals two. */
13640 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13641 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
13642 {
13643 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13644
13645 if (real_isfinite (ra)
13646 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
13647 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
13648 {
13649 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13650 const int prec = fmt->p;
13651 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13652 int inexact;
13653 mpfr_t m;
13654
13655 mpfr_init2 (m, prec);
13656 mpfr_from_real (m, ra, GMP_RNDN);
13657 mpfr_clear_flags ();
13658 inexact = func (m, m, rnd);
13659 result = do_mpfr_ckconv (m, type, inexact);
13660 mpfr_clear (m);
13661 }
13662 }
13663
13664 return result;
13665 }
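
/* Illustrative example (editorial sketch, not part of GCC): a standalone
   program following the same recipe with the public MPFR API -- init at
   the target precision, clear the flags, evaluate, and accept the result
   only if it is a normal number with no overflow or underflow flagged.
   Build with -lmpfr -lgmp.

     #include <stdio.h>
     #include <mpfr.h>

     int main (void)
     {
       mpfr_t m;
       int inexact;

       mpfr_init2 (m, 53);              // IEEE double precision
       mpfr_set_d (m, 1.0, GMP_RNDN);
       mpfr_clear_flags ();
       inexact = mpfr_sin (m, m, GMP_RNDN);
       if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ())
         printf ("sin(1) ~= %.17g (inexact = %d)\n",
                 mpfr_get_d (m, GMP_RNDN), inexact);
       mpfr_clear (m);
       return 0;
     }
*/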
13666
13667 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
13668 FUNC on it and return the resulting value as a tree with type TYPE.
13669 The mpfr precision is set to the precision of TYPE. We assume that
13670 function FUNC returns zero if the result could be calculated
13671 exactly within the requested precision. */
13672
13673 static tree
13674 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
13675 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13676 {
13677 tree result = NULL_TREE;
13678
13679 STRIP_NOPS (arg1);
13680 STRIP_NOPS (arg2);
13681
13682 /* To proceed, MPFR must exactly represent the target floating point
13683 format, which only happens when the target base equals two. */
13684 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13685 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13686 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13687 {
13688 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13689 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13690
13691 if (real_isfinite (ra1) && real_isfinite (ra2))
13692 {
13693 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13694 const int prec = fmt->p;
13695 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13696 int inexact;
13697 mpfr_t m1, m2;
13698
13699 mpfr_inits2 (prec, m1, m2, NULL);
13700 mpfr_from_real (m1, ra1, GMP_RNDN);
13701 mpfr_from_real (m2, ra2, GMP_RNDN);
13702 mpfr_clear_flags ();
13703 inexact = func (m1, m1, m2, rnd);
13704 result = do_mpfr_ckconv (m1, type, inexact);
13705 mpfr_clears (m1, m2, NULL);
13706 }
13707 }
13708
13709 return result;
13710 }
13711
13712 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
13713 FUNC on it and return the resulting value as a tree with type TYPE.
13714 The mpfr precision is set to the precision of TYPE. We assume that
13715 function FUNC returns zero if the result could be calculated
13716 exactly within the requested precision. */
13717
13718 static tree
13719 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
13720 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13721 {
13722 tree result = NULL_TREE;
13723
13724 STRIP_NOPS (arg1);
13725 STRIP_NOPS (arg2);
13726 STRIP_NOPS (arg3);
13727
13728 /* To proceed, MPFR must exactly represent the target floating point
13729 format, which only happens when the target base equals two. */
13730 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13731 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13732 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
13733 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
13734 {
13735 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13736 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13737 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
13738
13739 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
13740 {
13741 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13742 const int prec = fmt->p;
13743 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13744 int inexact;
13745 mpfr_t m1, m2, m3;
13746
13747 mpfr_inits2 (prec, m1, m2, m3, NULL);
13748 mpfr_from_real (m1, ra1, GMP_RNDN);
13749 mpfr_from_real (m2, ra2, GMP_RNDN);
13750 mpfr_from_real (m3, ra3, GMP_RNDN);
13751 mpfr_clear_flags ();
13752 inexact = func (m1, m1, m2, m3, rnd);
13753 result = do_mpfr_ckconv (m1, type, inexact);
13754 mpfr_clears (m1, m2, m3, NULL);
13755 }
13756 }
13757
13758 return result;
13759 }
13760
13761 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
13762 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
13763 If ARG_SINP and ARG_COSP are NULL then the result is returned
13764 as a complex value.
13765 The type is taken from the type of ARG and is used for setting the
13766 precision of the calculation and results. */
13767
13768 static tree
13769 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
13770 {
13771 tree const type = TREE_TYPE (arg);
13772 tree result = NULL_TREE;
13773
13774 STRIP_NOPS (arg);
13775
13776 /* To proceed, MPFR must exactly represent the target floating point
13777 format, which only happens when the target base equals two. */
13778 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13779 && TREE_CODE (arg) == REAL_CST
13780 && !TREE_OVERFLOW (arg))
13781 {
13782 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13783
13784 if (real_isfinite (ra))
13785 {
13786 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13787 const int prec = fmt->p;
13788 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13789 tree result_s, result_c;
13790 int inexact;
13791 mpfr_t m, ms, mc;
13792
13793 mpfr_inits2 (prec, m, ms, mc, NULL);
13794 mpfr_from_real (m, ra, GMP_RNDN);
13795 mpfr_clear_flags ();
13796 inexact = mpfr_sin_cos (ms, mc, m, rnd);
13797 result_s = do_mpfr_ckconv (ms, type, inexact);
13798 result_c = do_mpfr_ckconv (mc, type, inexact);
13799 mpfr_clears (m, ms, mc, NULL);
13800 if (result_s && result_c)
13801 {
13802 /* If we are to return in a complex value do so. */
13803 if (!arg_sinp && !arg_cosp)
13804 return build_complex (build_complex_type (type),
13805 result_c, result_s);
13806
13807 /* Dereference the sin/cos pointer arguments. */
13808 arg_sinp = build_fold_indirect_ref (arg_sinp);
13809 arg_cosp = build_fold_indirect_ref (arg_cosp);
13810 /* Proceed iff valid pointer types were passed in. */
13811 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
13812 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
13813 {
13814 /* Set the values. */
13815 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
13816 result_s);
13817 TREE_SIDE_EFFECTS (result_s) = 1;
13818 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
13819 result_c);
13820 TREE_SIDE_EFFECTS (result_c) = 1;
13821 /* Combine the assignments into a compound expr. */
13822 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13823 result_s, result_c));
13824 }
13825 }
13826 }
13827 }
13828 return result;
13829 }
13830
/* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
   two-argument mpfr order N Bessel function FUNC on them and return
   the resulting value as a tree with type TYPE.  The mpfr precision
   is set to the precision of TYPE.  We assume that function FUNC
   returns zero if the result could be calculated exactly within the
   requested precision.  */
static tree
do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
		  int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
		  const REAL_VALUE_TYPE *min, bool inclusive)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && tree_fits_shwi_p (arg1)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
    {
      const HOST_WIDE_INT n = tree_to_shwi (arg1);
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);

      if (n == (long) n
	  && real_isfinite (ra)
	  && (!min || real_compare (inclusive ? GE_EXPR : GT_EXPR, ra, min)))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  int inexact;
	  mpfr_t m;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = func (m, n, m, rnd);
	  result = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	}
    }

  return result;
}

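/* Illustrative sketch (not part of the original file, not compiled):
   mpfr_jn/mpfr_yn take the order as a host `long', which is why the
   folder above insists that ARG1 fits in one.  Folding jn (2, x)
   boils down to the following, assuming IEEE double:  */
#if 0
static double
jn_fold_sketch (long n, double x)
{
  mpfr_t m;
  double d;

  mpfr_init2 (m, 53);
  mpfr_set_d (m, x, GMP_RNDN);
  mpfr_jn (m, n, m, GMP_RNDN);	/* Order-N Bessel, first kind.  */
  d = mpfr_get_d (m, GMP_RNDN);
  mpfr_clear (m);
  return d;
}
#endif
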
/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the pointer *(ARG_QUO) and return the result.  The type is taken
   from the type of ARG0 and is used for setting the precision of the
   calculation and results.  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  tree result_rem;
	  long integer_quo;
	  mpfr_t m0, m1;

	  mpfr_inits2 (prec, m0, m1, NULL);
	  mpfr_from_real (m0, ra0, GMP_RNDN);
	  mpfr_from_real (m1, ra1, GMP_RNDN);
	  mpfr_clear_flags ();
	  mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
	  /* Remquo is independent of the rounding mode, so pass
	     inexact=0 to do_mpfr_ckconv().  */
	  result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
	  mpfr_clears (m0, m1, NULL);
	  if (result_rem)
	    {
	      /* MPFR calculates quo in the host's long so it may
		 return more bits in quo than the target int can hold
		 if sizeof(host long) > sizeof(target int).  This can
		 happen even for native compilers in LP64 mode.  In
		 these cases, modulo the quo value with the largest
		 number that the target int can hold while leaving one
		 bit for the sign.  */
	      if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
		integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));

	      /* Dereference the quo pointer argument.  */
	      arg_quo = build_fold_indirect_ref (arg_quo);
	      /* Proceed iff a valid pointer type was passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
		{
		  /* Set the value.  */
		  tree result_quo
		    = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
				   build_int_cst (TREE_TYPE (arg_quo),
						  integer_quo));
		  TREE_SIDE_EFFECTS (result_quo) = 1;
		  /* Combine the quo assignment with the rem.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_quo, result_rem));
		}
	    }
	}
    }
  return result;
}

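/* Illustrative sketch (not part of the original file, not compiled):
   mpfr_remquo returns the quotient bits through a host `long',
   matching the masking discussed in the comment above.  A host-side
   analogue, assuming IEEE double:  */
#if 0
static double
remquo_fold_sketch (double x, double y, int *quo)
{
  mpfr_t m0, m1;
  long q;
  double rem;

  mpfr_inits2 (53, m0, m1, NULL);
  mpfr_set_d (m0, x, GMP_RNDN);
  mpfr_set_d (m1, y, GMP_RNDN);
  /* Remainder lands in M0, low quotient bits (with sign) in Q.  */
  mpfr_remquo (m0, &q, m0, m1, GMP_RNDN);
  *quo = (int) q;
  rem = mpfr_get_d (m0, GMP_RNDN);
  mpfr_clears (m0, m1, NULL);
  return rem;
}
#endif
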
/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
	 negative integer.  */
      if (real_isfinite (ra)
	  && ra->cl != rvc_zero
	  && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  int inexact, sg;
	  mpfr_t m;
	  tree result_lg;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = mpfr_lgamma (m, &sg, m, rnd);
	  result_lg = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	  if (result_lg)
	    {
	      tree result_sg;

	      /* Dereference the arg_sg pointer argument.  */
	      arg_sg = build_fold_indirect_ref (arg_sg);
	      /* Assign the signgam value into *arg_sg.  */
	      result_sg = fold_build2 (MODIFY_EXPR,
				       TREE_TYPE (arg_sg), arg_sg,
				       build_int_cst (TREE_TYPE (arg_sg), sg));
	      TREE_SIDE_EFFECTS (result_sg) = 1;
	      /* Combine the signgam assignment with the lgamma result.  */
	      result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						result_sg, result_lg));
	    }
	}
    }

  return result;
}

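/* Illustrative sketch (not part of the original file, not compiled):
   mpfr_lgamma hands the sign of gamma(x) back through an int pointer,
   which the folder above stores through ARG_SG just as lgamma_r
   stores through its signgam pointer.  Assuming IEEE double:  */
#if 0
static double
lgamma_r_fold_sketch (double x, int *sg)
{
  mpfr_t m;
  double d;

  mpfr_init2 (m, 53);
  mpfr_set_d (m, x, GMP_RNDN);
  mpfr_lgamma (m, sg, m, GMP_RNDN);	/* *SG becomes -1 or 1.  */
  d = mpfr_get_d (m, GMP_RNDN);
  mpfr_clear (m);
  return d;
}
#endif
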
/* If argument ARG is a COMPLEX_CST, call the one-argument mpc
   function FUNC on it and return the resulting value as a tree with
   type TYPE.  The mpfr precision is set to the precision of TYPE.  We
   assume that function FUNC returns zero if the result could be
   calculated exactly within the requested precision.  */

static tree
do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
      const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));

      if (real_isfinite (re) && real_isfinite (im))
	{
	  const struct real_format *const fmt =
	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m;

	  mpc_init2 (m, prec);
	  mpfr_from_real (mpc_realref (m), re, rnd);
	  mpfr_from_real (mpc_imagref (m), im, rnd);
	  mpfr_clear_flags ();
	  inexact = func (m, m, crnd);
	  result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
	  mpc_clear (m);
	}
    }

  return result;
}

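/* Illustrative sketch (not part of the original file, not compiled):
   the complex folder drives MPC the way the real folders drive MPFR;
   each part of an mpc_t is reached through mpc_realref/mpc_imagref.
   For a one-argument function such as mpc_sqrt, assuming IEEE double
   parts:  */
#if 0
static void
csqrt_fold_sketch (double re, double im, double *rre, double *rim)
{
  mpc_t m;

  mpc_init2 (m, 53);
  mpfr_set_d (mpc_realref (m), re, GMP_RNDN);
  mpfr_set_d (mpc_imagref (m), im, GMP_RNDN);
  mpc_sqrt (m, m, MPC_RNDNN);	/* Round both parts to nearest.  */
  *rre = mpfr_get_d (mpc_realref (m), GMP_RNDN);
  *rim = mpfr_get_d (mpc_imagref (m), GMP_RNDN);
  mpc_clear (m);
}
#endif
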
/* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
   mpc function FUNC on them and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
	     int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
	  || (real_isfinite (re0) && real_isfinite (im0)
	      && real_isfinite (re1) && real_isfinite (im1)))
	{
	  const struct real_format *const fmt =
	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m0, m1;

	  mpc_init2 (m0, prec);
	  mpc_init2 (m1, prec);
	  mpfr_from_real (mpc_realref (m0), re0, rnd);
	  mpfr_from_real (mpc_imagref (m0), im0, rnd);
	  mpfr_from_real (mpc_realref (m1), re1, rnd);
	  mpfr_from_real (mpc_imagref (m1), im1, rnd);
	  mpfr_clear_flags ();
	  inexact = func (m0, m0, m1, crnd);
	  result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
	  mpc_clear (m0);
	  mpc_clear (m1);
	}
    }

  return result;
}

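/* Illustrative sketch (not part of the original file, not compiled):
   do_mpc_arg2 backs folds such as cpow, where FUNC is mpc_pow; with
   DO_NONFINITE set it also folds Inf/NaN operands via do_mpc_ckconv's
   force_convert path.  Assuming IEEE double parts:  */
#if 0
static void
cpow_fold_sketch (const double a[2], const double b[2], double r[2])
{
  mpc_t m0, m1;

  mpc_init2 (m0, 53);
  mpc_init2 (m1, 53);
  mpfr_set_d (mpc_realref (m0), a[0], GMP_RNDN);
  mpfr_set_d (mpc_imagref (m0), a[1], GMP_RNDN);
  mpfr_set_d (mpc_realref (m1), b[0], GMP_RNDN);
  mpfr_set_d (mpc_imagref (m1), b[1], GMP_RNDN);
  mpc_pow (m0, m0, m1, MPC_RNDNN);	/* M0 = M0 ** M1.  */
  r[0] = mpfr_get_d (mpc_realref (m0), GMP_RNDN);
  r[1] = mpfr_get_d (mpc_imagref (m0), GMP_RNDN);
  mpc_clear (m0);
  mpc_clear (m1);
}
#endif
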
/* Fold a call STMT to __{,v}sprintf_chk.  Return NULL_TREE if
   a normal call should be emitted rather than expanding the function
   inline.  FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK.  */

static tree
gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
{
  int nargs = gimple_call_num_args (stmt);

  return fold_builtin_sprintf_chk_1 (gimple_location (stmt), nargs,
				     (nargs > 0
				      ? gimple_call_arg_ptr (stmt, 0)
				      : &error_mark_node), fcode);
}

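/* Conceptual sketch (not part of the original file, not compiled; the
   exact conditions live in fold_builtin_sprintf_chk_1): when the
   formatted length is known and the object-size check can be
   discharged at compile time, the checked call is rewritten to its
   unchecked sibling, dropping the flag and size arguments:  */
#if 0
  char buf[64];
  /* As emitted under -D_FORTIFY_SOURCE:  */
  __builtin___sprintf_chk (buf, 0, __builtin_object_size (buf, 0), "hi");
  /* After folding, roughly equivalent to:  */
  sprintf (buf, "hi");
#endif
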
/* Fold a call STMT to __{,v}snprintf_chk.  Return NULL_TREE if
   a normal call should be emitted rather than expanding the function
   inline.  FCODE is either BUILT_IN_SNPRINTF_CHK or
   BUILT_IN_VSNPRINTF_CHK.  If MAXLEN is not NULL, it is the maximum
   length passed as the second argument.  */

tree
gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
				  enum built_in_function fcode)
{
  int nargs = gimple_call_num_args (stmt);

  return fold_builtin_snprintf_chk_1 (gimple_location (stmt), nargs,
				      (nargs > 0
				       ? gimple_call_arg_ptr (stmt, 0)
				       : &error_mark_node), maxlen, fcode);
}

/* Builtins with folding operations that operate on "..." arguments
   need special handling; we need to store the arguments in a convenient
   data structure before attempting any folding.  Fortunately there are
   only a few builtins that fall into this category.  FNDECL is the
   function, STMT is the GIMPLE_CALL statement for the call, and IGNORE
   is true if the result of the function call is ignored.  */

static tree
gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
			     bool ignore ATTRIBUTE_UNUSED)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  tree ret = NULL_TREE;

  switch (fcode)
    {
    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
      break;

    default:
      break;
    }
  if (ret)
    {
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      TREE_NO_WARNING (ret) = 1;
      return ret;
    }
  return NULL_TREE;
}

/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */

tree
fold_call_stmt (gimple stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
		    ? gimple_call_arg_ptr (stmt, 0)
		    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	{
	  return targetm.fold_builtin (fndecl, nargs, args, ignore);
	}
      else
	{
	  if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
	    ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
	  if (!ret)
	    ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
	  if (ret)
	    {
	      /* Propagate location information from original call to
		 expansion of builtin.  Otherwise things like
		 maybe_emit_chk_warning, that operate on the expansion
		 of a builtin, will use the wrong location information.  */
	      if (gimple_has_location (stmt))
		{
		  tree realret = ret;
		  if (TREE_CODE (ret) == NOP_EXPR)
		    realret = TREE_OPERAND (ret, 0);
		  if (CAN_HAVE_LOCATION_P (realret)
		      && !EXPR_HAS_LOCATION (realret))
		    SET_EXPR_LOCATION (realret, loc);
		  return realret;
		}
	      return ret;
	    }
	}
    }
  return NULL_TREE;
}

/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  tree builtin;
  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
	      && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
	      && asmspec != 0);

  builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);
  switch (DECL_FUNCTION_CODE (decl))
    {
    case BUILT_IN_MEMCPY:
      init_block_move_fn (asmspec);
      memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
      break;
    case BUILT_IN_MEMSET:
      init_block_clear_fn (asmspec);
      memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
      break;
    case BUILT_IN_MEMMOVE:
      memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
      break;
    case BUILT_IN_MEMCMP:
      memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
      break;
    case BUILT_IN_ABORT:
      abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
      break;
    case BUILT_IN_FFS:
      if (INT_TYPE_SIZE < BITS_PER_WORD)
	{
	  set_user_assembler_libfunc ("ffs", asmspec);
	  set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
						       MODE_INT, 0), "ffs");
	}
      break;
    default:
      break;
    }
}

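/* Usage sketch (not part of the original file, not compiled): a user
   redeclaration with an asm label is what funnels into the function
   above, keeping the builtin decl and the libfunc tables in
   agreement.  The name my_memcpy is hypothetical.  */
#if 0
extern void *memcpy (void *, const void *, __SIZE_TYPE__)
  __asm__ ("my_memcpy");
/* From here on, code expanded from __builtin_memcpy, including block
   moves, targets my_memcpy.  */
#endif
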
/* Return true if DECL is a builtin that expands to a constant or similarly
   simple code.  */
bool
is_simple_builtin (tree decl)
{
  if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
	/* Builtins that expand to constants.  */
      case BUILT_IN_CONSTANT_P:
      case BUILT_IN_EXPECT:
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_UNREACHABLE:
	/* Simple register moves or loads from stack.  */
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_RETURN_ADDRESS:
      case BUILT_IN_EXTRACT_RETURN_ADDR:
      case BUILT_IN_FROB_RETURN_ADDR:
      case BUILT_IN_RETURN:
      case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_VA_END:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
	/* Exception state returns or moves registers around.  */
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_COPY_VALUES:
	return true;

      default:
	return false;
      }

  return false;
}

/* Return true if DECL is a builtin that is not expensive, i.e., it is
   most probably expanded inline into reasonably simple code.  This is a
   superset of is_simple_builtin.  */
bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      case BUILT_IN_ABS:
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
	return true;

      default:
	return is_simple_builtin (decl);
      }

  return false;
}
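
/* Usage sketch (not part of the original file, not compiled): a
   hypothetical cost model in the spirit of the inliner's heuristics,
   charging nothing for calls these predicates accept:  */
#if 0
static int
builtin_call_cost_sketch (tree fndecl)
{
  if (is_inexpensive_builtin (fndecl))
    return 0;	/* Expands inline to trivial code.  */
  return 16;	/* Hypothetical cost of an out-of-line call.  */
}
#endif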