builtins.c (expand_movstr): Check movstr expand done or fail.
1 /* Expand builtin functions.
2 Copyright (C) 1988-2014 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "stringpool.h"
28 #include "stor-layout.h"
29 #include "calls.h"
30 #include "varasm.h"
31 #include "tree-object-size.h"
32 #include "realmpfr.h"
33 #include "basic-block.h"
34 #include "tree-ssa-alias.h"
35 #include "internal-fn.h"
36 #include "gimple-expr.h"
37 #include "is-a.h"
38 #include "gimple.h"
39 #include "flags.h"
40 #include "regs.h"
41 #include "hard-reg-set.h"
42 #include "except.h"
43 #include "function.h"
44 #include "insn-config.h"
45 #include "expr.h"
46 #include "optabs.h"
47 #include "libfuncs.h"
48 #include "recog.h"
49 #include "output.h"
50 #include "typeclass.h"
51 #include "predict.h"
52 #include "tm_p.h"
53 #include "target.h"
54 #include "langhooks.h"
55 #include "tree-ssanames.h"
56 #include "tree-dfa.h"
57 #include "value-prof.h"
58 #include "diagnostic-core.h"
59 #include "builtins.h"
60 #include "ubsan.h"
61 #include "cilk.h"
62
63
64 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
65
66 struct target_builtins default_target_builtins;
67 #if SWITCHABLE_TARGET
68 struct target_builtins *this_target_builtins = &default_target_builtins;
69 #endif
70
71 /* Define the names of the builtin function types and codes. */
72 const char *const built_in_class_names[BUILT_IN_LAST]
73 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
74
75 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
76 const char * built_in_names[(int) END_BUILTINS] =
77 {
78 #include "builtins.def"
79 };
80 #undef DEF_BUILTIN
81
 82 /* Set up an array of _DECL trees, making sure each element is
83 initialized to NULL_TREE. */
84 builtin_info_type builtin_info;
85
86 /* Non-zero if __builtin_constant_p should be folded right away. */
87 bool force_folding_builtin_constant_p;
88
89 static const char *c_getstr (tree);
90 static rtx c_readstr (const char *, enum machine_mode);
91 static int target_char_cast (tree, char *);
92 static rtx get_memory_rtx (tree, tree);
93 static int apply_args_size (void);
94 static int apply_result_size (void);
95 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
96 static rtx result_vector (int, rtx);
97 #endif
98 static void expand_builtin_update_setjmp_buf (rtx);
99 static void expand_builtin_prefetch (tree);
100 static rtx expand_builtin_apply_args (void);
101 static rtx expand_builtin_apply_args_1 (void);
102 static rtx expand_builtin_apply (rtx, rtx, rtx);
103 static void expand_builtin_return (rtx);
104 static enum type_class type_to_class (tree);
105 static rtx expand_builtin_classify_type (tree);
106 static void expand_errno_check (tree, rtx);
107 static rtx expand_builtin_mathfn (tree, rtx, rtx);
108 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
109 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
110 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
111 static rtx expand_builtin_interclass_mathfn (tree, rtx);
112 static rtx expand_builtin_sincos (tree);
113 static rtx expand_builtin_cexpi (tree, rtx);
114 static rtx expand_builtin_int_roundingfn (tree, rtx);
115 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
116 static rtx expand_builtin_next_arg (void);
117 static rtx expand_builtin_va_start (tree);
118 static rtx expand_builtin_va_end (tree);
119 static rtx expand_builtin_va_copy (tree);
120 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
121 static rtx expand_builtin_strcmp (tree, rtx);
122 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
123 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
124 static rtx expand_builtin_memcpy (tree, rtx);
125 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
126 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
127 enum machine_mode, int);
128 static rtx expand_builtin_strcpy (tree, rtx);
129 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
130 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
131 static rtx expand_builtin_strncpy (tree, rtx);
132 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
133 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
134 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
135 static rtx expand_builtin_bzero (tree);
136 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
137 static rtx expand_builtin_alloca (tree, bool);
138 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
139 static rtx expand_builtin_frame_address (tree, tree);
140 static tree stabilize_va_list_loc (location_t, tree, int);
141 static rtx expand_builtin_expect (tree, rtx);
142 static tree fold_builtin_constant_p (tree);
143 static tree fold_builtin_expect (location_t, tree, tree);
144 static tree fold_builtin_classify_type (tree);
145 static tree fold_builtin_strlen (location_t, tree, tree);
146 static tree fold_builtin_inf (location_t, tree, int);
147 static tree fold_builtin_nan (tree, tree, int);
148 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
149 static bool validate_arg (const_tree, enum tree_code code);
150 static bool integer_valued_real_p (tree);
151 static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
152 static bool readonly_data_expr (tree);
153 static rtx expand_builtin_fabs (tree, rtx, rtx);
154 static rtx expand_builtin_signbit (tree, rtx);
155 static tree fold_builtin_sqrt (location_t, tree, tree);
156 static tree fold_builtin_cbrt (location_t, tree, tree);
157 static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
158 static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
159 static tree fold_builtin_cos (location_t, tree, tree, tree);
160 static tree fold_builtin_cosh (location_t, tree, tree, tree);
161 static tree fold_builtin_tan (tree, tree);
162 static tree fold_builtin_trunc (location_t, tree, tree);
163 static tree fold_builtin_floor (location_t, tree, tree);
164 static tree fold_builtin_ceil (location_t, tree, tree);
165 static tree fold_builtin_round (location_t, tree, tree);
166 static tree fold_builtin_int_roundingfn (location_t, tree, tree);
167 static tree fold_builtin_bitop (tree, tree);
168 static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree, bool, int);
169 static tree fold_builtin_strchr (location_t, tree, tree, tree);
170 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
171 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
172 static tree fold_builtin_strcmp (location_t, tree, tree);
173 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
174 static tree fold_builtin_signbit (location_t, tree, tree);
175 static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
176 static tree fold_builtin_isascii (location_t, tree);
177 static tree fold_builtin_toascii (location_t, tree);
178 static tree fold_builtin_isdigit (location_t, tree);
179 static tree fold_builtin_fabs (location_t, tree, tree);
180 static tree fold_builtin_abs (location_t, tree, tree);
181 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
182 enum tree_code);
183 static tree fold_builtin_n (location_t, tree, tree *, int, bool);
184 static tree fold_builtin_0 (location_t, tree, bool);
185 static tree fold_builtin_1 (location_t, tree, tree, bool);
186 static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
187 static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
188 static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
189 static tree fold_builtin_varargs (location_t, tree, tree, bool);
190
191 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
192 static tree fold_builtin_strstr (location_t, tree, tree, tree);
193 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
194 static tree fold_builtin_strcat (location_t, tree, tree);
195 static tree fold_builtin_strncat (location_t, tree, tree, tree);
196 static tree fold_builtin_strspn (location_t, tree, tree);
197 static tree fold_builtin_strcspn (location_t, tree, tree);
198 static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
199 static tree fold_builtin_snprintf (location_t, tree, tree, tree, tree, int);
200
201 static rtx expand_builtin_object_size (tree);
202 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
203 enum built_in_function);
204 static void maybe_emit_chk_warning (tree, enum built_in_function);
205 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
206 static void maybe_emit_free_warning (tree);
207 static tree fold_builtin_object_size (tree, tree);
208 static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
209 static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
210 static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
211 static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
212 static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
213 enum built_in_function);
214 static bool init_target_chars (void);
215
216 static unsigned HOST_WIDE_INT target_newline;
217 static unsigned HOST_WIDE_INT target_percent;
218 static unsigned HOST_WIDE_INT target_c;
219 static unsigned HOST_WIDE_INT target_s;
220 static char target_percent_c[3];
221 static char target_percent_s[3];
222 static char target_percent_s_newline[4];
223 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
224 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
225 static tree do_mpfr_arg2 (tree, tree, tree,
226 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
227 static tree do_mpfr_arg3 (tree, tree, tree, tree,
228 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
229 static tree do_mpfr_sincos (tree, tree, tree);
230 static tree do_mpfr_bessel_n (tree, tree, tree,
231 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
232 const REAL_VALUE_TYPE *, bool);
233 static tree do_mpfr_remquo (tree, tree, tree);
234 static tree do_mpfr_lgamma_r (tree, tree, tree);
235 static void expand_builtin_sync_synchronize (void);
236
 237 /* Return true if NAME starts with __builtin_, __sync_ or __atomic_, or names a Cilk Plus runtime helper when Cilk Plus is enabled. */
238
239 static bool
240 is_builtin_name (const char *name)
241 {
242 if (strncmp (name, "__builtin_", 10) == 0)
243 return true;
244 if (strncmp (name, "__sync_", 7) == 0)
245 return true;
246 if (strncmp (name, "__atomic_", 9) == 0)
247 return true;
248 if (flag_enable_cilkplus
249 && (!strcmp (name, "__cilkrts_detach")
250 || !strcmp (name, "__cilkrts_pop_frame")))
251 return true;
252 return false;
253 }
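/* For example (illustrative values, not from the sources):

     is_builtin_name ("__builtin_memcpy")     => true
     is_builtin_name ("__sync_lock_release")  => true
     is_builtin_name ("__atomic_load_n")      => true
     is_builtin_name ("memcpy")               => false  */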
254
255
256 /* Return true if DECL is a function symbol representing a built-in. */
257
258 bool
259 is_builtin_fn (tree decl)
260 {
261 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
262 }
263
 264 /* By default we assume that C99 functions are present at run time,
265 but sincos is not. */
266 bool
267 default_libc_has_function (enum function_class fn_class)
268 {
269 if (fn_class == function_c94
270 || fn_class == function_c99_misc
271 || fn_class == function_c99_math_complex)
272 return true;
273
274 return false;
275 }
276
277 bool
278 gnu_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED)
279 {
280 return true;
281 }
282
283 bool
284 no_c99_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED)
285 {
286 return false;
287 }
288
289 /* Return true if NODE should be considered for inline expansion regardless
290 of the optimization level. This means whenever a function is invoked with
291 its "internal" name, which normally contains the prefix "__builtin". */
292
293 static bool
294 called_as_built_in (tree node)
295 {
296 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
297 we want the name used to call the function, not the name it
298 will have. */
299 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
300 return is_builtin_name (name);
301 }
302
303 /* Compute values M and N such that M divides (address of EXP - N) and such
 304 that N < M. If these numbers can be determined, store M in *ALIGNP and N in
 305 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
 306 *ALIGNP and any bit-offset to *BITPOSP.
307
308 Note that the address (and thus the alignment) computed here is based
309 on the address to which a symbol resolves, whereas DECL_ALIGN is based
310 on the address at which an object is actually located. These two
311 addresses are not always the same. For example, on ARM targets,
312 the address &foo of a Thumb function foo() has the lowest bit set,
313 whereas foo() itself starts on an even address.
314
315 If ADDR_P is true we are taking the address of the memory reference EXP
316 and thus cannot rely on the access taking place. */
317
318 static bool
319 get_object_alignment_2 (tree exp, unsigned int *alignp,
320 unsigned HOST_WIDE_INT *bitposp, bool addr_p)
321 {
322 HOST_WIDE_INT bitsize, bitpos;
323 tree offset;
324 enum machine_mode mode;
325 int unsignedp, volatilep;
326 unsigned int align = BITS_PER_UNIT;
327 bool known_alignment = false;
328
329 /* Get the innermost object and the constant (bitpos) and possibly
330 variable (offset) offset of the access. */
331 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
332 &mode, &unsignedp, &volatilep, true);
333
334 /* Extract alignment information from the innermost object and
335 possibly adjust bitpos and offset. */
336 if (TREE_CODE (exp) == FUNCTION_DECL)
337 {
338 /* Function addresses can encode extra information besides their
339 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
340 allows the low bit to be used as a virtual bit, we know
341 that the address itself must be at least 2-byte aligned. */
342 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
343 align = 2 * BITS_PER_UNIT;
344 }
345 else if (TREE_CODE (exp) == LABEL_DECL)
346 ;
347 else if (TREE_CODE (exp) == CONST_DECL)
348 {
349 /* The alignment of a CONST_DECL is determined by its initializer. */
350 exp = DECL_INITIAL (exp);
351 align = TYPE_ALIGN (TREE_TYPE (exp));
352 #ifdef CONSTANT_ALIGNMENT
353 if (CONSTANT_CLASS_P (exp))
354 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
355 #endif
356 known_alignment = true;
357 }
358 else if (DECL_P (exp))
359 {
360 align = DECL_ALIGN (exp);
361 known_alignment = true;
362 }
363 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
364 {
365 align = TYPE_ALIGN (TREE_TYPE (exp));
366 }
367 else if (TREE_CODE (exp) == INDIRECT_REF
368 || TREE_CODE (exp) == MEM_REF
369 || TREE_CODE (exp) == TARGET_MEM_REF)
370 {
371 tree addr = TREE_OPERAND (exp, 0);
372 unsigned ptr_align;
373 unsigned HOST_WIDE_INT ptr_bitpos;
374
375 if (TREE_CODE (addr) == BIT_AND_EXPR
376 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
377 {
378 align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
379 & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
380 align *= BITS_PER_UNIT;
381 addr = TREE_OPERAND (addr, 0);
382 }
383
384 known_alignment
385 = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
386 align = MAX (ptr_align, align);
387
388 /* The alignment of the pointer operand in a TARGET_MEM_REF
389 has to take the variable offset parts into account. */
390 if (TREE_CODE (exp) == TARGET_MEM_REF)
391 {
392 if (TMR_INDEX (exp))
393 {
394 unsigned HOST_WIDE_INT step = 1;
395 if (TMR_STEP (exp))
396 step = TREE_INT_CST_LOW (TMR_STEP (exp));
397 align = MIN (align, (step & -step) * BITS_PER_UNIT);
398 }
399 if (TMR_INDEX2 (exp))
400 align = BITS_PER_UNIT;
401 known_alignment = false;
402 }
403
404 /* When EXP is an actual memory reference then we can use
405 TYPE_ALIGN of a pointer indirection to derive alignment.
406 Do so only if get_pointer_alignment_1 did not reveal absolute
407 alignment knowledge and if using that alignment would
408 improve the situation. */
409 if (!addr_p && !known_alignment
410 && TYPE_ALIGN (TREE_TYPE (exp)) > align)
411 align = TYPE_ALIGN (TREE_TYPE (exp));
412 else
413 {
414 /* Else adjust bitpos accordingly. */
415 bitpos += ptr_bitpos;
416 if (TREE_CODE (exp) == MEM_REF
417 || TREE_CODE (exp) == TARGET_MEM_REF)
418 bitpos += mem_ref_offset (exp).low * BITS_PER_UNIT;
419 }
420 }
421 else if (TREE_CODE (exp) == STRING_CST)
422 {
423 /* STRING_CST are the only constant objects we allow to be not
424 wrapped inside a CONST_DECL. */
425 align = TYPE_ALIGN (TREE_TYPE (exp));
426 #ifdef CONSTANT_ALIGNMENT
427 if (CONSTANT_CLASS_P (exp))
428 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
429 #endif
430 known_alignment = true;
431 }
432
433 /* If there is a non-constant offset part extract the maximum
434 alignment that can prevail. */
435 if (offset)
436 {
437 unsigned int trailing_zeros = tree_ctz (offset);
438 if (trailing_zeros < HOST_BITS_PER_INT)
439 {
440 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
441 if (inner)
442 align = MIN (align, inner);
443 }
444 }
445
446 *alignp = align;
447 *bitposp = bitpos & (*alignp - 1);
448 return known_alignment;
449 }
450
451 /* For a memory reference expression EXP compute values M and N such that M
452 divides (&EXP - N) and such that N < M. If these numbers can be determined,
 453 store M in *ALIGNP and N in *BITPOSP and return true. Otherwise return false
 454 and store BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP. */
455
456 bool
457 get_object_alignment_1 (tree exp, unsigned int *alignp,
458 unsigned HOST_WIDE_INT *bitposp)
459 {
460 return get_object_alignment_2 (exp, alignp, bitposp, false);
461 }
462
463 /* Return the alignment in bits of EXP, an object. */
464
465 unsigned int
466 get_object_alignment (tree exp)
467 {
468 unsigned HOST_WIDE_INT bitpos = 0;
469 unsigned int align;
470
471 get_object_alignment_1 (exp, &align, &bitpos);
472
473 /* align and bitpos now specify known low bits of the pointer.
474 ptr & (align - 1) == bitpos. */
475
476 if (bitpos != 0)
477 align = (bitpos & -bitpos);
478 return align;
479 }
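/* Worked example (a sketch): if the call above determines ALIGN == 32
   and BITPOS == 8 -- the pointer is one byte past a 4-byte boundary --
   then BITPOS & -BITPOS == 8 and only byte alignment (8 bits) is
   returned.  The full ALIGN survives only when BITPOS is zero.  */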
480
481 /* For a pointer valued expression EXP compute values M and N such that M
482 divides (EXP - N) and such that N < M. If these numbers can be determined,
 483 store M in *ALIGNP and N in *BITPOSP and return true. Return false if
484 the results are just a conservative approximation.
485
486 If EXP is not a pointer, false is returned too. */
487
488 bool
489 get_pointer_alignment_1 (tree exp, unsigned int *alignp,
490 unsigned HOST_WIDE_INT *bitposp)
491 {
492 STRIP_NOPS (exp);
493
494 if (TREE_CODE (exp) == ADDR_EXPR)
495 return get_object_alignment_2 (TREE_OPERAND (exp, 0),
496 alignp, bitposp, true);
497 else if (TREE_CODE (exp) == SSA_NAME
498 && POINTER_TYPE_P (TREE_TYPE (exp)))
499 {
500 unsigned int ptr_align, ptr_misalign;
501 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
502
503 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
504 {
505 *bitposp = ptr_misalign * BITS_PER_UNIT;
506 *alignp = ptr_align * BITS_PER_UNIT;
507 /* We cannot really tell whether this result is an approximation. */
508 return true;
509 }
510 else
511 {
512 *bitposp = 0;
513 *alignp = BITS_PER_UNIT;
514 return false;
515 }
516 }
517 else if (TREE_CODE (exp) == INTEGER_CST)
518 {
519 *alignp = BIGGEST_ALIGNMENT;
520 *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
521 & (BIGGEST_ALIGNMENT - 1));
522 return true;
523 }
524
525 *bitposp = 0;
526 *alignp = BITS_PER_UNIT;
527 return false;
528 }
529
530 /* Return the alignment in bits of EXP, a pointer valued expression.
531 The alignment returned is, by default, the alignment of the thing that
 532 EXP points to. If it is not a POINTER_TYPE, BITS_PER_UNIT is returned.
533
534 Otherwise, look at the expression to see if we can do better, i.e., if the
535 expression is actually pointing at an object whose alignment is tighter. */
536
537 unsigned int
538 get_pointer_alignment (tree exp)
539 {
540 unsigned HOST_WIDE_INT bitpos = 0;
541 unsigned int align;
542
543 get_pointer_alignment_1 (exp, &align, &bitpos);
544
545 /* align and bitpos now specify known low bits of the pointer.
546 ptr & (align - 1) == bitpos. */
547
548 if (bitpos != 0)
549 align = (bitpos & -bitpos);
550
551 return align;
552 }
553
554 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
555 way, because it could contain a zero byte in the middle.
556 TREE_STRING_LENGTH is the size of the character array, not the string.
557
558 ONLY_VALUE should be nonzero if the result is not going to be emitted
559 into the instruction stream and zero if it is going to be expanded.
560 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
561 is returned, otherwise NULL, since
562 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
563 evaluate the side-effects.
564
565 The value returned is of type `ssizetype'.
566
567 Unfortunately, string_constant can't access the values of const char
568 arrays with initializers, so neither can we do so here. */
569
570 tree
571 c_strlen (tree src, int only_value)
572 {
573 tree offset_node;
574 HOST_WIDE_INT offset;
575 int max;
576 const char *ptr;
577 location_t loc;
578
579 STRIP_NOPS (src);
580 if (TREE_CODE (src) == COND_EXPR
581 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
582 {
583 tree len1, len2;
584
585 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
586 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
587 if (tree_int_cst_equal (len1, len2))
588 return len1;
589 }
590
591 if (TREE_CODE (src) == COMPOUND_EXPR
592 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
593 return c_strlen (TREE_OPERAND (src, 1), only_value);
594
595 loc = EXPR_LOC_OR_LOC (src, input_location);
596
597 src = string_constant (src, &offset_node);
598 if (src == 0)
599 return NULL_TREE;
600
601 max = TREE_STRING_LENGTH (src) - 1;
602 ptr = TREE_STRING_POINTER (src);
603
604 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
605 {
606 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
607 compute the offset to the following null if we don't know where to
608 start searching for it. */
609 int i;
610
611 for (i = 0; i < max; i++)
612 if (ptr[i] == 0)
613 return NULL_TREE;
614
615 /* We don't know the starting offset, but we do know that the string
616 has no internal zero bytes. We can assume that the offset falls
617 within the bounds of the string; otherwise, the programmer deserves
618 what he gets. Subtract the offset from the length of the string,
619 and return that. This would perhaps not be valid if we were dealing
620 with named arrays in addition to literal string constants. */
621
622 return size_diffop_loc (loc, size_int (max), offset_node);
623 }
624
625 /* We have a known offset into the string. Start searching there for
626 a null character if we can represent it as a single HOST_WIDE_INT. */
627 if (offset_node == 0)
628 offset = 0;
629 else if (! tree_fits_shwi_p (offset_node))
630 offset = -1;
631 else
632 offset = tree_to_shwi (offset_node);
633
634 /* If the offset is known to be out of bounds, warn, and call strlen at
635 runtime. */
636 if (offset < 0 || offset > max)
637 {
638 /* Suppress multiple warnings for propagated constant strings. */
639 if (! TREE_NO_WARNING (src))
640 {
641 warning_at (loc, 0, "offset outside bounds of constant string");
642 TREE_NO_WARNING (src) = 1;
643 }
644 return NULL_TREE;
645 }
646
647 /* Use strlen to search for the first zero byte. Since any strings
648 constructed with build_string will have nulls appended, we win even
649 if we get handed something like (char[4])"abcd".
650
651 Since OFFSET is our starting index into the string, no further
652 calculation is needed. */
653 return ssize_int (strlen (ptr + offset));
654 }
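/* Example (illustrative): for SRC == "foo\0bar" and a constant offset
   of 5 this returns ssize_int (2), the length of "ar".  With a
   non-constant offset the internal zero byte makes the result depend
   on where the search starts, so NULL_TREE is returned and strlen is
   left to run time.  */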
655
656 /* Return a char pointer for a C string if it is a string constant
657 or sum of string constant and integer constant. */
658
659 static const char *
660 c_getstr (tree src)
661 {
662 tree offset_node;
663
664 src = string_constant (src, &offset_node);
665 if (src == 0)
666 return 0;
667
668 if (offset_node == 0)
669 return TREE_STRING_POINTER (src);
670 else if (!tree_fits_uhwi_p (offset_node)
671 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
672 return 0;
673
674 return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
675 }
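/* Example (illustrative): for the tree form of "hello" + 2 this
   returns a host pointer to "llo"; a non-constant or out-of-bounds
   offset yields 0.  */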
676
677 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
678 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
679
680 static rtx
681 c_readstr (const char *str, enum machine_mode mode)
682 {
683 HOST_WIDE_INT c[2];
684 HOST_WIDE_INT ch;
685 unsigned int i, j;
686
687 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
688
689 c[0] = 0;
690 c[1] = 0;
691 ch = 1;
692 for (i = 0; i < GET_MODE_SIZE (mode); i++)
693 {
694 j = i;
695 if (WORDS_BIG_ENDIAN)
696 j = GET_MODE_SIZE (mode) - i - 1;
697 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
698 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
699 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
700 j *= BITS_PER_UNIT;
701 gcc_assert (j < HOST_BITS_PER_DOUBLE_INT);
702
703 if (ch)
704 ch = (unsigned char) str[i];
705 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
706 }
707 return immed_double_const (c[0], c[1], mode);
708 }
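/* Example (assuming a target where neither BYTES_BIG_ENDIAN nor
   WORDS_BIG_ENDIAN is set): c_readstr ("abcd", SImode) produces the
   constant 0x64636261, with 'a' in the least significant byte; on a
   big-endian target the same call produces 0x61626364.  */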
709
710 /* Cast a target constant CST to target CHAR and if that value fits into
 711 the host char type, return zero and store that value in the variable
 712 pointed to by P. Otherwise return one. */
713
714 static int
715 target_char_cast (tree cst, char *p)
716 {
717 unsigned HOST_WIDE_INT val, hostval;
718
719 if (TREE_CODE (cst) != INTEGER_CST
720 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
721 return 1;
722
723 val = TREE_INT_CST_LOW (cst);
724 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
725 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
726
727 hostval = val;
728 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
729 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
730
731 if (val != hostval)
732 return 1;
733
734 *p = hostval;
735 return 0;
736 }
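/* Note the value is truncated to the target char first, so the cast
   only fails when the target char is wider than the host char: e.g.
   with a 16-bit target char and an 8-bit host char the constant 300
   fits the target but not the host, and 1 is returned.  */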
737
738 /* Similar to save_expr, but assumes that arbitrary code is not executed
739 in between the multiple evaluations. In particular, we assume that a
740 non-addressable local variable will not be modified. */
741
742 static tree
743 builtin_save_expr (tree exp)
744 {
745 if (TREE_CODE (exp) == SSA_NAME
746 || (TREE_ADDRESSABLE (exp) == 0
747 && (TREE_CODE (exp) == PARM_DECL
748 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
749 return exp;
750
751 return save_expr (exp);
752 }
753
754 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
755 times to get the address of either a higher stack frame, or a return
756 address located within it (depending on FNDECL_CODE). */
757
758 static rtx
759 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
760 {
761 int i;
762
763 #ifdef INITIAL_FRAME_ADDRESS_RTX
764 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
765 #else
766 rtx tem;
767
768 /* For a zero count with __builtin_return_address, we don't care what
769 frame address we return, because target-specific definitions will
770 override us. Therefore frame pointer elimination is OK, and using
771 the soft frame pointer is OK.
772
773 For a nonzero count, or a zero count with __builtin_frame_address,
774 we require a stable offset from the current frame pointer to the
775 previous one, so we must use the hard frame pointer, and
776 we must disable frame pointer elimination. */
777 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
778 tem = frame_pointer_rtx;
779 else
780 {
781 tem = hard_frame_pointer_rtx;
782
783 /* Tell reload not to eliminate the frame pointer. */
784 crtl->accesses_prior_frames = 1;
785 }
786 #endif
787
788 /* Some machines need special handling before we can access
789 arbitrary frames. For example, on the SPARC, we must first flush
790 all register windows to the stack. */
791 #ifdef SETUP_FRAME_ADDRESSES
792 if (count > 0)
793 SETUP_FRAME_ADDRESSES ();
794 #endif
795
796 /* On the SPARC, the return address is not in the frame, it is in a
797 register. There is no way to access it off of the current frame
798 pointer, but it can be accessed off the previous frame pointer by
799 reading the value from the register window save area. */
800 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
801 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
802 count--;
803 #endif
804
805 /* Scan back COUNT frames to the specified frame. */
806 for (i = 0; i < count; i++)
807 {
808 /* Assume the dynamic chain pointer is in the word that the
809 frame address points to, unless otherwise specified. */
810 #ifdef DYNAMIC_CHAIN_ADDRESS
811 tem = DYNAMIC_CHAIN_ADDRESS (tem);
812 #endif
813 tem = memory_address (Pmode, tem);
814 tem = gen_frame_mem (Pmode, tem);
815 tem = copy_to_reg (tem);
816 }
817
818 /* For __builtin_frame_address, return what we've got. But, on
819 the SPARC for example, we may have to add a bias. */
820 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
821 #ifdef FRAME_ADDR_RTX
822 return FRAME_ADDR_RTX (tem);
823 #else
824 return tem;
825 #endif
826
827 /* For __builtin_return_address, get the return address from that frame. */
828 #ifdef RETURN_ADDR_RTX
829 tem = RETURN_ADDR_RTX (count, tem);
830 #else
831 tem = memory_address (Pmode,
832 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
833 tem = gen_frame_mem (Pmode, tem);
834 #endif
835 return tem;
836 }
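/* So, for example, __builtin_return_address (0) yields the return
   address of the current frame, and __builtin_frame_address (1) the
   frame address of the caller, subject to the target macros used
   above.  */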
837
838 /* Alias set used for setjmp buffer. */
839 static alias_set_type setjmp_alias_set = -1;
840
841 /* Construct the leading half of a __builtin_setjmp call. Control will
842 return to RECEIVER_LABEL. This is also called directly by the SJLJ
843 exception handling code. */
844
845 void
846 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
847 {
848 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
849 rtx stack_save;
850 rtx mem;
851
852 if (setjmp_alias_set == -1)
853 setjmp_alias_set = new_alias_set ();
854
855 buf_addr = convert_memory_address (Pmode, buf_addr);
856
857 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
858
859 /* We store the frame pointer and the address of receiver_label in
860 the buffer and use the rest of it for the stack save area, which
861 is machine-dependent. */
862
863 mem = gen_rtx_MEM (Pmode, buf_addr);
864 set_mem_alias_set (mem, setjmp_alias_set);
865 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
866
867 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
 868 GET_MODE_SIZE (Pmode)));
869 set_mem_alias_set (mem, setjmp_alias_set);
870
871 emit_move_insn (validize_mem (mem),
872 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
873
874 stack_save = gen_rtx_MEM (sa_mode,
875 plus_constant (Pmode, buf_addr,
876 2 * GET_MODE_SIZE (Pmode)));
877 set_mem_alias_set (stack_save, setjmp_alias_set);
878 emit_stack_save (SAVE_NONLOCAL, &stack_save);
879
880 /* If there is further processing to do, do it. */
881 #ifdef HAVE_builtin_setjmp_setup
882 if (HAVE_builtin_setjmp_setup)
883 emit_insn (gen_builtin_setjmp_setup (buf_addr));
884 #endif
885
886 /* We have a nonlocal label. */
887 cfun->has_nonlocal_label = 1;
888 }
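/* The buffer laid out above, in Pmode-word slots (a sketch; the stack
   save area is machine-dependent and may span several words):

     buf[0]   frame value (targetm.builtin_setjmp_frame_value)
     buf[1]   address of RECEIVER_LABEL
     buf[2]   start of the SAVE_NONLOCAL stack save area  */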
889
890 /* Construct the trailing part of a __builtin_setjmp call. This is
891 also called directly by the SJLJ exception handling code.
 892 If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler. */
893
894 void
895 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
896 {
897 rtx chain;
898
 899 /* The frame pointer is live when we arrive here, so make sure it's
 900 marked as used by this function. */
901 emit_use (hard_frame_pointer_rtx);
902
903 /* Mark the static chain as clobbered here so life information
904 doesn't get messed up for it. */
905 chain = targetm.calls.static_chain (current_function_decl, true);
906 if (chain && REG_P (chain))
907 emit_clobber (chain);
908
909 /* Now put in the code to restore the frame pointer, and argument
910 pointer, if needed. */
911 #ifdef HAVE_nonlocal_goto
912 if (! HAVE_nonlocal_goto)
913 #endif
914 /* First adjust our frame pointer to its actual value. It was
915 previously set to the start of the virtual area corresponding to
916 the stacked variables when we branched here and now needs to be
917 adjusted to the actual hardware fp value.
918
919 Assignments to virtual registers are converted by
920 instantiate_virtual_regs into the corresponding assignment
921 to the underlying register (fp in this case) that makes
922 the original assignment true.
923 So the following insn will actually be decrementing fp by
924 STARTING_FRAME_OFFSET. */
925 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
926
927 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
928 if (fixed_regs[ARG_POINTER_REGNUM])
929 {
930 #ifdef ELIMINABLE_REGS
931 /* If the argument pointer can be eliminated in favor of the
932 frame pointer, we don't need to restore it. We assume here
933 that if such an elimination is present, it can always be used.
934 This is the case on all known machines; if we don't make this
935 assumption, we do unnecessary saving on many machines. */
936 size_t i;
937 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
938
939 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
940 if (elim_regs[i].from == ARG_POINTER_REGNUM
941 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
942 break;
943
944 if (i == ARRAY_SIZE (elim_regs))
945 #endif
946 {
947 /* Now restore our arg pointer from the address at which it
948 was saved in our stack frame. */
949 emit_move_insn (crtl->args.internal_arg_pointer,
950 copy_to_reg (get_arg_pointer_save_area ()));
951 }
952 }
953 #endif
954
955 #ifdef HAVE_builtin_setjmp_receiver
956 if (receiver_label != NULL && HAVE_builtin_setjmp_receiver)
957 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
958 else
959 #endif
960 #ifdef HAVE_nonlocal_goto_receiver
961 if (HAVE_nonlocal_goto_receiver)
962 emit_insn (gen_nonlocal_goto_receiver ());
963 else
964 #endif
965 { /* Nothing */ }
966
967 /* We must not allow the code we just generated to be reordered by
968 scheduling. Specifically, the update of the frame pointer must
969 happen immediately, not later. Similarly, we must block
970 (frame-related) register values to be used across this code. */
971 emit_insn (gen_blockage ());
972 }
973
974 /* __builtin_longjmp is passed a pointer to an array of five words (not
975 all will be used on all machines). It operates similarly to the C
976 library function of the same name, but is more efficient. Much of
977 the code below is copied from the handling of non-local gotos. */
978
979 static void
980 expand_builtin_longjmp (rtx buf_addr, rtx value)
981 {
982 rtx fp, lab, stack, insn, last;
983 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
984
985 /* DRAP is needed for stack realign if longjmp is expanded to current
986 function */
987 if (SUPPORTS_STACK_ALIGNMENT)
988 crtl->need_drap = true;
989
990 if (setjmp_alias_set == -1)
991 setjmp_alias_set = new_alias_set ();
992
993 buf_addr = convert_memory_address (Pmode, buf_addr);
994
995 buf_addr = force_reg (Pmode, buf_addr);
996
997 /* We require that the user must pass a second argument of 1, because
998 that is what builtin_setjmp will return. */
999 gcc_assert (value == const1_rtx);
1000
1001 last = get_last_insn ();
1002 #ifdef HAVE_builtin_longjmp
1003 if (HAVE_builtin_longjmp)
1004 emit_insn (gen_builtin_longjmp (buf_addr));
1005 else
1006 #endif
1007 {
1008 fp = gen_rtx_MEM (Pmode, buf_addr);
1009 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
1010 GET_MODE_SIZE (Pmode)));
1011
1012 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
1013 2 * GET_MODE_SIZE (Pmode)));
1014 set_mem_alias_set (fp, setjmp_alias_set);
1015 set_mem_alias_set (lab, setjmp_alias_set);
1016 set_mem_alias_set (stack, setjmp_alias_set);
1017
1018 /* Pick up FP, label, and SP from the block and jump. This code is
1019 from expand_goto in stmt.c; see there for detailed comments. */
1020 #ifdef HAVE_nonlocal_goto
1021 if (HAVE_nonlocal_goto)
1022 /* We have to pass a value to the nonlocal_goto pattern that will
1023 get copied into the static_chain pointer, but it does not matter
1024 what that value is, because builtin_setjmp does not use it. */
1025 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
1026 else
1027 #endif
1028 {
1029 lab = copy_to_reg (lab);
1030
1031 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1032 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1033
1034 emit_move_insn (hard_frame_pointer_rtx, fp);
1035 emit_stack_restore (SAVE_NONLOCAL, stack);
1036
1037 emit_use (hard_frame_pointer_rtx);
1038 emit_use (stack_pointer_rtx);
1039 emit_indirect_jump (lab);
1040 }
1041 }
1042
1043 /* Search backwards and mark the jump insn as a non-local goto.
1044 Note that this precludes the use of __builtin_longjmp to a
1045 __builtin_setjmp target in the same function. However, we've
1046 already cautioned the user that these functions are for
1047 internal exception handling use only. */
1048 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1049 {
1050 gcc_assert (insn != last);
1051
1052 if (JUMP_P (insn))
1053 {
1054 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1055 break;
1056 }
1057 else if (CALL_P (insn))
1058 break;
1059 }
1060 }
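/* Usage note: the expansion above insists on the source form

     __builtin_longjmp (buf, 1);

   since 1 is the only value __builtin_setjmp can return after a
   longjmp; any other second argument trips the gcc_assert.  */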
1061
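/* Return true if the iterator ITER has not yet reached the end of the
   argument list. */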
1062 static inline bool
1063 more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1064 {
1065 return (iter->i < iter->n);
1066 }
1067
1068 /* This function validates the types of a function call argument list
1069 against a specified list of tree_codes. If the last specifier is a 0,
 1070 that represents an ellipsis, otherwise the last specifier must be a
1071 VOID_TYPE. */
1072
1073 static bool
1074 validate_arglist (const_tree callexpr, ...)
1075 {
1076 enum tree_code code;
 1077 bool res = false;
1078 va_list ap;
1079 const_call_expr_arg_iterator iter;
1080 const_tree arg;
1081
1082 va_start (ap, callexpr);
1083 init_const_call_expr_arg_iterator (callexpr, &iter);
1084
1085 do
1086 {
1087 code = (enum tree_code) va_arg (ap, int);
1088 switch (code)
1089 {
1090 case 0:
 1091 /* This signifies an ellipsis; any further arguments are all ok. */
1092 res = true;
1093 goto end;
1094 case VOID_TYPE:
1095 /* This signifies an endlink, if no arguments remain, return
1096 true, otherwise return false. */
1097 res = !more_const_call_expr_args_p (&iter);
1098 goto end;
1099 default:
1100 /* If no parameters remain or the parameter's code does not
1101 match the specified code, return false. Otherwise continue
1102 checking any remaining arguments. */
1103 arg = next_const_call_expr_arg (&iter);
1104 if (!validate_arg (arg, code))
1105 goto end;
1106 break;
1107 }
1108 }
1109 while (1);
1110
 1111 /* We need gotos here so that every exit path reaches the single
 1112 va_end call below. */
1113 end: ;
1114 va_end (ap);
1115
1116 return res;
1117 }
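/* For example, the check used by expand_builtin_nonlocal_goto below,

     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)

   accepts exactly two pointer arguments, while ending the list with 0
   instead of VOID_TYPE would allow any further arguments.  */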
1118
1119 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1120 and the address of the save area. */
1121
1122 static rtx
1123 expand_builtin_nonlocal_goto (tree exp)
1124 {
1125 tree t_label, t_save_area;
1126 rtx r_label, r_save_area, r_fp, r_sp, insn;
1127
1128 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1129 return NULL_RTX;
1130
1131 t_label = CALL_EXPR_ARG (exp, 0);
1132 t_save_area = CALL_EXPR_ARG (exp, 1);
1133
1134 r_label = expand_normal (t_label);
1135 r_label = convert_memory_address (Pmode, r_label);
1136 r_save_area = expand_normal (t_save_area);
1137 r_save_area = convert_memory_address (Pmode, r_save_area);
1138 /* Copy the address of the save location to a register just in case it was
1139 based on the frame pointer. */
1140 r_save_area = copy_to_reg (r_save_area);
1141 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1142 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1143 plus_constant (Pmode, r_save_area,
1144 GET_MODE_SIZE (Pmode)));
1145
1146 crtl->has_nonlocal_goto = 1;
1147
1148 #ifdef HAVE_nonlocal_goto
1149 /* ??? We no longer need to pass the static chain value, afaik. */
1150 if (HAVE_nonlocal_goto)
1151 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1152 else
1153 #endif
1154 {
1155 r_label = copy_to_reg (r_label);
1156
1157 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1158 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1159
1160 /* Restore frame pointer for containing function. */
1161 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1162 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1163
1164 /* USE of hard_frame_pointer_rtx added for consistency;
1165 not clear if really needed. */
1166 emit_use (hard_frame_pointer_rtx);
1167 emit_use (stack_pointer_rtx);
1168
1169 /* If the architecture is using a GP register, we must
1170 conservatively assume that the target function makes use of it.
1171 The prologue of functions with nonlocal gotos must therefore
1172 initialize the GP register to the appropriate value, and we
1173 must then make sure that this value is live at the point
1174 of the jump. (Note that this doesn't necessarily apply
1175 to targets with a nonlocal_goto pattern; they are free
1176 to implement it in their own way. Note also that this is
1177 a no-op if the GP register is a global invariant.) */
1178 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
1179 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
1180 emit_use (pic_offset_table_rtx);
1181
1182 emit_indirect_jump (r_label);
1183 }
1184
1185 /* Search backwards to the jump insn and mark it as a
1186 non-local goto. */
1187 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1188 {
1189 if (JUMP_P (insn))
1190 {
1191 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1192 break;
1193 }
1194 else if (CALL_P (insn))
1195 break;
1196 }
1197
1198 return const0_rtx;
1199 }
1200
1201 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1202 (not all will be used on all machines) that was passed to __builtin_setjmp.
1203 It updates the stack pointer in that block to correspond to the current
1204 stack pointer. */
1205
1206 static void
1207 expand_builtin_update_setjmp_buf (rtx buf_addr)
1208 {
1209 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1210 rtx stack_save
1211 = gen_rtx_MEM (sa_mode,
1212 memory_address
1213 (sa_mode,
1214 plus_constant (Pmode, buf_addr,
1215 2 * GET_MODE_SIZE (Pmode))));
1216
1217 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1218 }
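/* Note this rewrites only the third slot (buf[2]) laid out by
   expand_builtin_setjmp_setup above; the saved frame value and
   receiver label are left untouched.  */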
1219
1220 /* Expand a call to __builtin_prefetch. For a target that does not support
1221 data prefetch, evaluate the memory address argument in case it has side
1222 effects. */
1223
1224 static void
1225 expand_builtin_prefetch (tree exp)
1226 {
1227 tree arg0, arg1, arg2;
1228 int nargs;
1229 rtx op0, op1, op2;
1230
1231 if (!validate_arglist (exp, POINTER_TYPE, 0))
1232 return;
1233
1234 arg0 = CALL_EXPR_ARG (exp, 0);
1235
1236 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1237 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1238 locality). */
1239 nargs = call_expr_nargs (exp);
1240 if (nargs > 1)
1241 arg1 = CALL_EXPR_ARG (exp, 1);
1242 else
1243 arg1 = integer_zero_node;
1244 if (nargs > 2)
1245 arg2 = CALL_EXPR_ARG (exp, 2);
1246 else
1247 arg2 = integer_three_node;
1248
1249 /* Argument 0 is an address. */
1250 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1251
1252 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1253 if (TREE_CODE (arg1) != INTEGER_CST)
1254 {
1255 error ("second argument to %<__builtin_prefetch%> must be a constant");
1256 arg1 = integer_zero_node;
1257 }
1258 op1 = expand_normal (arg1);
1259 /* Argument 1 must be either zero or one. */
1260 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1261 {
1262 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1263 " using zero");
1264 op1 = const0_rtx;
1265 }
1266
1267 /* Argument 2 (locality) must be a compile-time constant int. */
1268 if (TREE_CODE (arg2) != INTEGER_CST)
1269 {
1270 error ("third argument to %<__builtin_prefetch%> must be a constant");
1271 arg2 = integer_zero_node;
1272 }
1273 op2 = expand_normal (arg2);
1274 /* Argument 2 must be 0, 1, 2, or 3. */
1275 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1276 {
1277 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1278 op2 = const0_rtx;
1279 }
1280
1281 #ifdef HAVE_prefetch
1282 if (HAVE_prefetch)
1283 {
1284 struct expand_operand ops[3];
1285
1286 create_address_operand (&ops[0], op0);
1287 create_integer_operand (&ops[1], INTVAL (op1));
1288 create_integer_operand (&ops[2], INTVAL (op2));
1289 if (maybe_expand_insn (CODE_FOR_prefetch, 3, ops))
1290 return;
1291 }
1292 #endif
1293
1294 /* Don't do anything with direct references to volatile memory, but
1295 generate code to handle other side effects. */
1296 if (!MEM_P (op0) && side_effects_p (op0))
1297 emit_insn (op0);
1298 }
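/* Example of a call this expands (source form):

     __builtin_prefetch (p, 1, 3);

   where 1 marks the access as a write and 3 asks for maximum temporal
   locality; both must be the compile-time constants validated
   above.  */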
1299
1300 /* Get a MEM rtx for expression EXP which is the address of an operand
1301 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1302 the maximum length of the block of memory that might be accessed or
1303 NULL if unknown. */
1304
1305 static rtx
1306 get_memory_rtx (tree exp, tree len)
1307 {
1308 tree orig_exp = exp;
1309 rtx addr, mem;
1310
 1311 /* When EXP is an unresolved SAVE_EXPR, MEM_ATTRS can still be derived
 1312 from its expression; for expr->a.b only <variable>.a.b is recorded. */
1313 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1314 exp = TREE_OPERAND (exp, 0);
1315
1316 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1317 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1318
1319 /* Get an expression we can use to find the attributes to assign to MEM.
1320 First remove any nops. */
1321 while (CONVERT_EXPR_P (exp)
1322 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1323 exp = TREE_OPERAND (exp, 0);
1324
 1325 /* Build a MEM_REF representing the whole accessed area as a byte blob
1326 (as builtin stringops may alias with anything). */
1327 exp = fold_build2 (MEM_REF,
1328 build_array_type (char_type_node,
1329 build_range_type (sizetype,
1330 size_one_node, len)),
1331 exp, build_int_cst (ptr_type_node, 0));
1332
1333 /* If the MEM_REF has no acceptable address, try to get the base object
1334 from the original address we got, and build an all-aliasing
1335 unknown-sized access to that one. */
1336 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1337 set_mem_attributes (mem, exp, 0);
1338 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1339 && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1340 0))))
1341 {
1342 exp = build_fold_addr_expr (exp);
1343 exp = fold_build2 (MEM_REF,
1344 build_array_type (char_type_node,
1345 build_range_type (sizetype,
1346 size_zero_node,
1347 NULL)),
1348 exp, build_int_cst (ptr_type_node, 0));
1349 set_mem_attributes (mem, exp, 0);
1350 }
1351 set_mem_alias_set (mem, 0);
1352 return mem;
1353 }
1354 \f
1355 /* Built-in functions to perform an untyped call and return. */
1356
1357 #define apply_args_mode \
1358 (this_target_builtins->x_apply_args_mode)
1359 #define apply_result_mode \
1360 (this_target_builtins->x_apply_result_mode)
1361
1362 /* Return the size required for the block returned by __builtin_apply_args,
1363 and initialize apply_args_mode. */
1364
1365 static int
1366 apply_args_size (void)
1367 {
1368 static int size = -1;
1369 int align;
1370 unsigned int regno;
1371 enum machine_mode mode;
1372
1373 /* The values computed by this function never change. */
1374 if (size < 0)
1375 {
1376 /* The first value is the incoming arg-pointer. */
1377 size = GET_MODE_SIZE (Pmode);
1378
1379 /* The second value is the structure value address unless this is
1380 passed as an "invisible" first argument. */
1381 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1382 size += GET_MODE_SIZE (Pmode);
1383
1384 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1385 if (FUNCTION_ARG_REGNO_P (regno))
1386 {
1387 mode = targetm.calls.get_raw_arg_mode (regno);
1388
1389 gcc_assert (mode != VOIDmode);
1390
1391 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1392 if (size % align != 0)
1393 size = CEIL (size, align) * align;
1394 size += GET_MODE_SIZE (mode);
1395 apply_args_mode[regno] = mode;
1396 }
1397 else
1398 {
1399 apply_args_mode[regno] = VOIDmode;
1400 }
1401 }
1402 return size;
1403 }
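/* The resulting block layout (a sketch):

     [0]              incoming arg pointer (Pmode)
     [+ Pmode size]   structure value address, unless it is passed as
                      an "invisible" first argument
     ...              one suitably aligned slot per register for which
                      apply_args_mode[regno] != VOIDmode  */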
1404
1405 /* Return the size required for the block returned by __builtin_apply,
1406 and initialize apply_result_mode. */
1407
1408 static int
1409 apply_result_size (void)
1410 {
1411 static int size = -1;
1412 int align, regno;
1413 enum machine_mode mode;
1414
1415 /* The values computed by this function never change. */
1416 if (size < 0)
1417 {
1418 size = 0;
1419
1420 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1421 if (targetm.calls.function_value_regno_p (regno))
1422 {
1423 mode = targetm.calls.get_raw_result_mode (regno);
1424
1425 gcc_assert (mode != VOIDmode);
1426
1427 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1428 if (size % align != 0)
1429 size = CEIL (size, align) * align;
1430 size += GET_MODE_SIZE (mode);
1431 apply_result_mode[regno] = mode;
1432 }
1433 else
1434 apply_result_mode[regno] = VOIDmode;
1435
1436 /* Allow targets that use untyped_call and untyped_return to override
1437 the size so that machine-specific information can be stored here. */
1438 #ifdef APPLY_RESULT_SIZE
1439 size = APPLY_RESULT_SIZE;
1440 #endif
1441 }
1442 return size;
1443 }
1444
1445 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1446 /* Create a vector describing the result block RESULT. If SAVEP is true,
1447 the result block is used to save the values; otherwise it is used to
1448 restore the values. */
1449
1450 static rtx
1451 result_vector (int savep, rtx result)
1452 {
1453 int regno, size, align, nelts;
1454 enum machine_mode mode;
1455 rtx reg, mem;
1456 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1457
1458 size = nelts = 0;
1459 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1460 if ((mode = apply_result_mode[regno]) != VOIDmode)
1461 {
1462 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1463 if (size % align != 0)
1464 size = CEIL (size, align) * align;
1465 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1466 mem = adjust_address (result, mode, size);
1467 savevec[nelts++] = (savep
1468 ? gen_rtx_SET (VOIDmode, mem, reg)
1469 : gen_rtx_SET (VOIDmode, reg, mem));
1470 size += GET_MODE_SIZE (mode);
1471 }
1472 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1473 }
1474 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1475
1476 /* Save the state required to perform an untyped call with the same
1477 arguments as were passed to the current function. */
1478
1479 static rtx
1480 expand_builtin_apply_args_1 (void)
1481 {
1482 rtx registers, tem;
1483 int size, align, regno;
1484 enum machine_mode mode;
1485 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1486
1487 /* Create a block where the arg-pointer, structure value address,
1488 and argument registers can be saved. */
1489 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1490
1491 /* Walk past the arg-pointer and structure value address. */
1492 size = GET_MODE_SIZE (Pmode);
1493 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1494 size += GET_MODE_SIZE (Pmode);
1495
1496 /* Save each register used in calling a function to the block. */
1497 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1498 if ((mode = apply_args_mode[regno]) != VOIDmode)
1499 {
1500 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1501 if (size % align != 0)
1502 size = CEIL (size, align) * align;
1503
1504 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1505
1506 emit_move_insn (adjust_address (registers, mode, size), tem);
1507 size += GET_MODE_SIZE (mode);
1508 }
1509
1510 /* Save the arg pointer to the block. */
1511 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1512 #ifdef STACK_GROWS_DOWNWARD
 1513 /* We need the arg pointer as the caller actually passed the arguments,
 1514 not as we might have pretended they were passed. Make sure it's a valid
1515 operand, as emit_move_insn isn't expected to handle a PLUS. */
1516 tem
1517 = force_operand (plus_constant (Pmode, tem, crtl->args.pretend_args_size),
1518 NULL_RTX);
1519 #endif
1520 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1521
1522 size = GET_MODE_SIZE (Pmode);
1523
1524 /* Save the structure value address unless this is passed as an
1525 "invisible" first argument. */
1526 if (struct_incoming_value)
1527 {
1528 emit_move_insn (adjust_address (registers, Pmode, size),
1529 copy_to_reg (struct_incoming_value));
1530 size += GET_MODE_SIZE (Pmode);
1531 }
1532
1533 /* Return the address of the block. */
1534 return copy_addr_to_reg (XEXP (registers, 0));
1535 }
1536
1537 /* __builtin_apply_args returns block of memory allocated on
1538 the stack into which is stored the arg pointer, structure
1539 value address, static chain, and all the registers that might
1540 possibly be used in performing a function call. The code is
1541 moved to the start of the function so the incoming values are
1542 saved. */
1543
1544 static rtx
1545 expand_builtin_apply_args (void)
1546 {
1547 /* Don't do __builtin_apply_args more than once in a function.
1548 Save the result of the first call and reuse it. */
1549 if (apply_args_value != 0)
1550 return apply_args_value;
1551 {
1552 /* When this function is called, it means that registers must be
1553 saved on entry to this function. So we migrate the
1554 call to the first insn of this function. */
1555 rtx temp;
1556 rtx seq;
1557
1558 start_sequence ();
1559 temp = expand_builtin_apply_args_1 ();
1560 seq = get_insns ();
1561 end_sequence ();
1562
1563 apply_args_value = temp;
1564
1565 /* Put the insns after the NOTE that starts the function.
1566 If this is inside a start_sequence, make the outer-level insn
1567 chain current, so the code is placed at the start of the
1568 function. If internal_arg_pointer is a non-virtual pseudo,
1569 it needs to be placed after the function that initializes
1570 that pseudo. */
1571 push_topmost_sequence ();
1572 if (REG_P (crtl->args.internal_arg_pointer)
1573 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1574 emit_insn_before (seq, parm_birth_insn);
1575 else
1576 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1577 pop_topmost_sequence ();
1578 return temp;
1579 }
1580 }
1581
1582 /* Perform an untyped call and save the state required to perform an
1583 untyped return of whatever value was returned by the given function. */
1584
1585 static rtx
1586 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1587 {
1588 int size, align, regno;
1589 enum machine_mode mode;
1590 rtx incoming_args, result, reg, dest, src, call_insn;
1591 rtx old_stack_level = 0;
1592 rtx call_fusage = 0;
1593 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1594
1595 arguments = convert_memory_address (Pmode, arguments);
1596
1597 /* Create a block where the return registers can be saved. */
1598 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1599
1600 /* Fetch the arg pointer from the ARGUMENTS block. */
1601 incoming_args = gen_reg_rtx (Pmode);
1602 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1603 #ifndef STACK_GROWS_DOWNWARD
1604 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1605 incoming_args, 0, OPTAB_LIB_WIDEN);
1606 #endif
1607
1608 /* Push a new argument block and copy the arguments. Do not allow
1609 the (potential) memcpy call below to interfere with our stack
1610 manipulations. */
1611 do_pending_stack_adjust ();
1612 NO_DEFER_POP;
1613
1614 /* Save the stack with nonlocal if available. */
1615 #ifdef HAVE_save_stack_nonlocal
1616 if (HAVE_save_stack_nonlocal)
1617 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1618 else
1619 #endif
1620 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1621
1622 /* Allocate a block of memory onto the stack and copy the memory
1623 arguments to the outgoing arguments address. We can pass TRUE
1624 as the 4th argument because we just saved the stack pointer
1625 and will restore it right after the call. */
1626 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1627
1628 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1629 may have already set current_function_calls_alloca to true.
1630 current_function_calls_alloca won't be set if argsize is zero,
1631 so we have to guarantee need_drap is true here. */
1632 if (SUPPORTS_STACK_ALIGNMENT)
1633 crtl->need_drap = true;
1634
1635 dest = virtual_outgoing_args_rtx;
1636 #ifndef STACK_GROWS_DOWNWARD
1637 if (CONST_INT_P (argsize))
1638 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1639 else
1640 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1641 #endif
1642 dest = gen_rtx_MEM (BLKmode, dest);
1643 set_mem_align (dest, PARM_BOUNDARY);
1644 src = gen_rtx_MEM (BLKmode, incoming_args);
1645 set_mem_align (src, PARM_BOUNDARY);
1646 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1647
1648 /* Refer to the argument block. */
1649 apply_args_size ();
1650 arguments = gen_rtx_MEM (BLKmode, arguments);
1651 set_mem_align (arguments, PARM_BOUNDARY);
1652
1653 /* Walk past the arg-pointer and structure value address. */
1654 size = GET_MODE_SIZE (Pmode);
1655 if (struct_value)
1656 size += GET_MODE_SIZE (Pmode);
1657
1658 /* Restore each of the registers previously saved. Make USE insns
1659 for each of these registers for use in making the call. */
1660 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1661 if ((mode = apply_args_mode[regno]) != VOIDmode)
1662 {
1663 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1664 if (size % align != 0)
1665 size = CEIL (size, align) * align;
1666 reg = gen_rtx_REG (mode, regno);
1667 emit_move_insn (reg, adjust_address (arguments, mode, size));
1668 use_reg (&call_fusage, reg);
1669 size += GET_MODE_SIZE (mode);
1670 }
1671
1672 /* Restore the structure value address unless this is passed as an
1673 "invisible" first argument. */
1674 size = GET_MODE_SIZE (Pmode);
1675 if (struct_value)
1676 {
1677 rtx value = gen_reg_rtx (Pmode);
1678 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1679 emit_move_insn (struct_value, value);
1680 if (REG_P (struct_value))
1681 use_reg (&call_fusage, struct_value);
1682 size += GET_MODE_SIZE (Pmode);
1683 }
1684
1685 /* All arguments and registers used for the call are set up by now! */
1686 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1687
1688 /* Ensure the address is valid. A SYMBOL_REF is already valid, so there is
1689 no need; and we don't want to load it into a register as an optimization,
1690 because prepare_call_address has already done that if appropriate. */
1691 if (GET_CODE (function) != SYMBOL_REF)
1692 function = memory_address (FUNCTION_MODE, function);
1693
1694 /* Generate the actual call instruction and save the return value. */
1695 #ifdef HAVE_untyped_call
1696 if (HAVE_untyped_call)
1697 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1698 result, result_vector (1, result)));
1699 else
1700 #endif
1701 #ifdef HAVE_call_value
1702 if (HAVE_call_value)
1703 {
1704 rtx valreg = 0;
1705
1706 /* Locate the unique return register. It is not possible to
1707 express a call that sets more than one return register using
1708 call_value; use untyped_call for that. In fact, untyped_call
1709 only needs to save the return registers in the given block. */
1710 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1711 if ((mode = apply_result_mode[regno]) != VOIDmode)
1712 {
1713 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1714
1715 valreg = gen_rtx_REG (mode, regno);
1716 }
1717
1718 emit_call_insn (GEN_CALL_VALUE (valreg,
1719 gen_rtx_MEM (FUNCTION_MODE, function),
1720 const0_rtx, NULL_RTX, const0_rtx));
1721
1722 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1723 }
1724 else
1725 #endif
1726 gcc_unreachable ();
1727
1728 /* Find the CALL insn we just emitted, and attach the register usage
1729 information. */
1730 call_insn = last_call_insn ();
1731 add_function_usage_to (call_insn, call_fusage);
1732
1733 /* Restore the stack. */
1734 #ifdef HAVE_save_stack_nonlocal
1735 if (HAVE_save_stack_nonlocal)
1736 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1737 else
1738 #endif
1739 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1740 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1741
1742 OK_DEFER_POP;
1743
1744 /* Return the address of the result block. */
1745 result = copy_addr_to_reg (XEXP (result, 0));
1746 return convert_memory_address (ptr_mode, result);
1747 }
1748
1749 /* Perform an untyped return. */
1750
1751 static void
1752 expand_builtin_return (rtx result)
1753 {
1754 int size, align, regno;
1755 enum machine_mode mode;
1756 rtx reg;
1757 rtx call_fusage = 0;
1758
1759 result = convert_memory_address (Pmode, result);
1760
1761 apply_result_size ();
1762 result = gen_rtx_MEM (BLKmode, result);
1763
1764 #ifdef HAVE_untyped_return
1765 if (HAVE_untyped_return)
1766 {
1767 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1768 emit_barrier ();
1769 return;
1770 }
1771 #endif
1772
1773 /* Restore the return value and note that each value is used. */
1774 size = 0;
1775 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1776 if ((mode = apply_result_mode[regno]) != VOIDmode)
1777 {
1778 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1779 if (size % align != 0)
1780 size = CEIL (size, align) * align;
1781 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1782 emit_move_insn (reg, adjust_address (result, mode, size));
1783
1784 push_to_sequence (call_fusage);
1785 emit_use (reg);
1786 call_fusage = get_insns ();
1787 end_sequence ();
1788 size += GET_MODE_SIZE (mode);
1789 }
1790
1791 /* Put the USE insns before the return. */
1792 emit_insn (call_fusage);
1793
1794 /* Return whatever values were restored by jumping directly to the end
1795 of the function. */
1796 expand_naked_return ();
1797 }
1798
1799 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1800
1801 static enum type_class
1802 type_to_class (tree type)
1803 {
1804 switch (TREE_CODE (type))
1805 {
1806 case VOID_TYPE: return void_type_class;
1807 case INTEGER_TYPE: return integer_type_class;
1808 case ENUMERAL_TYPE: return enumeral_type_class;
1809 case BOOLEAN_TYPE: return boolean_type_class;
1810 case POINTER_TYPE: return pointer_type_class;
1811 case REFERENCE_TYPE: return reference_type_class;
1812 case OFFSET_TYPE: return offset_type_class;
1813 case REAL_TYPE: return real_type_class;
1814 case COMPLEX_TYPE: return complex_type_class;
1815 case FUNCTION_TYPE: return function_type_class;
1816 case METHOD_TYPE: return method_type_class;
1817 case RECORD_TYPE: return record_type_class;
1818 case UNION_TYPE:
1819 case QUAL_UNION_TYPE: return union_type_class;
1820 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1821 ? string_type_class : array_type_class);
1822 case LANG_TYPE: return lang_type_class;
1823 default: return no_type_class;
1824 }
1825 }
1826
1827 /* Expand a call EXP to __builtin_classify_type. */
1828
1829 static rtx
1830 expand_builtin_classify_type (tree exp)
1831 {
1832 if (call_expr_nargs (exp))
1833 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1834 return GEN_INT (no_type_class);
1835 }
1836
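/* Illustrative results in user code (a sketch; the numeric class
   values are defined in typeclass.h):

     int i; double d; int *p;
     __builtin_classify_type (i)   returns integer_type_class
     __builtin_classify_type (d)   returns real_type_class
     __builtin_classify_type (p)   returns pointer_type_class  */
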
1837 /* This helper macro, meant to be used in mathfn_built_in_1 below,
1838 determines which among a set of three builtin math functions is
1839 appropriate for a given type mode. The `F' and `L' cases are
1840 automatically generated from the `double' case. */
1841 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1842 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1843 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1844 fcodel = BUILT_IN_MATHFN##L ; break;
1845 /* Similar to above, but appends _R after any F/L suffix. */
1846 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1847 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1848 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1849 fcodel = BUILT_IN_MATHFN##L_R ; break;
1850
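/* For example, CASE_MATHFN (BUILT_IN_SIN) expands to:

     case BUILT_IN_SIN: case BUILT_IN_SINF: case BUILT_IN_SINL:
       fcode = BUILT_IN_SIN; fcodef = BUILT_IN_SINF;
       fcodel = BUILT_IN_SINL; break;  */
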
1851 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
1852 if available. If IMPLICIT_P is true use the implicit builtin declaration,
1853 otherwise use the explicit declaration. If we can't do the conversion,
1854 return zero. */
1855
1856 static tree
1857 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
1858 {
1859 enum built_in_function fcode, fcodef, fcodel, fcode2;
1860
1861 switch (fn)
1862 {
1863 CASE_MATHFN (BUILT_IN_ACOS)
1864 CASE_MATHFN (BUILT_IN_ACOSH)
1865 CASE_MATHFN (BUILT_IN_ASIN)
1866 CASE_MATHFN (BUILT_IN_ASINH)
1867 CASE_MATHFN (BUILT_IN_ATAN)
1868 CASE_MATHFN (BUILT_IN_ATAN2)
1869 CASE_MATHFN (BUILT_IN_ATANH)
1870 CASE_MATHFN (BUILT_IN_CBRT)
1871 CASE_MATHFN (BUILT_IN_CEIL)
1872 CASE_MATHFN (BUILT_IN_CEXPI)
1873 CASE_MATHFN (BUILT_IN_COPYSIGN)
1874 CASE_MATHFN (BUILT_IN_COS)
1875 CASE_MATHFN (BUILT_IN_COSH)
1876 CASE_MATHFN (BUILT_IN_DREM)
1877 CASE_MATHFN (BUILT_IN_ERF)
1878 CASE_MATHFN (BUILT_IN_ERFC)
1879 CASE_MATHFN (BUILT_IN_EXP)
1880 CASE_MATHFN (BUILT_IN_EXP10)
1881 CASE_MATHFN (BUILT_IN_EXP2)
1882 CASE_MATHFN (BUILT_IN_EXPM1)
1883 CASE_MATHFN (BUILT_IN_FABS)
1884 CASE_MATHFN (BUILT_IN_FDIM)
1885 CASE_MATHFN (BUILT_IN_FLOOR)
1886 CASE_MATHFN (BUILT_IN_FMA)
1887 CASE_MATHFN (BUILT_IN_FMAX)
1888 CASE_MATHFN (BUILT_IN_FMIN)
1889 CASE_MATHFN (BUILT_IN_FMOD)
1890 CASE_MATHFN (BUILT_IN_FREXP)
1891 CASE_MATHFN (BUILT_IN_GAMMA)
1892 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1893 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1894 CASE_MATHFN (BUILT_IN_HYPOT)
1895 CASE_MATHFN (BUILT_IN_ILOGB)
1896 CASE_MATHFN (BUILT_IN_ICEIL)
1897 CASE_MATHFN (BUILT_IN_IFLOOR)
1898 CASE_MATHFN (BUILT_IN_INF)
1899 CASE_MATHFN (BUILT_IN_IRINT)
1900 CASE_MATHFN (BUILT_IN_IROUND)
1901 CASE_MATHFN (BUILT_IN_ISINF)
1902 CASE_MATHFN (BUILT_IN_J0)
1903 CASE_MATHFN (BUILT_IN_J1)
1904 CASE_MATHFN (BUILT_IN_JN)
1905 CASE_MATHFN (BUILT_IN_LCEIL)
1906 CASE_MATHFN (BUILT_IN_LDEXP)
1907 CASE_MATHFN (BUILT_IN_LFLOOR)
1908 CASE_MATHFN (BUILT_IN_LGAMMA)
1909 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1910 CASE_MATHFN (BUILT_IN_LLCEIL)
1911 CASE_MATHFN (BUILT_IN_LLFLOOR)
1912 CASE_MATHFN (BUILT_IN_LLRINT)
1913 CASE_MATHFN (BUILT_IN_LLROUND)
1914 CASE_MATHFN (BUILT_IN_LOG)
1915 CASE_MATHFN (BUILT_IN_LOG10)
1916 CASE_MATHFN (BUILT_IN_LOG1P)
1917 CASE_MATHFN (BUILT_IN_LOG2)
1918 CASE_MATHFN (BUILT_IN_LOGB)
1919 CASE_MATHFN (BUILT_IN_LRINT)
1920 CASE_MATHFN (BUILT_IN_LROUND)
1921 CASE_MATHFN (BUILT_IN_MODF)
1922 CASE_MATHFN (BUILT_IN_NAN)
1923 CASE_MATHFN (BUILT_IN_NANS)
1924 CASE_MATHFN (BUILT_IN_NEARBYINT)
1925 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1926 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1927 CASE_MATHFN (BUILT_IN_POW)
1928 CASE_MATHFN (BUILT_IN_POWI)
1929 CASE_MATHFN (BUILT_IN_POW10)
1930 CASE_MATHFN (BUILT_IN_REMAINDER)
1931 CASE_MATHFN (BUILT_IN_REMQUO)
1932 CASE_MATHFN (BUILT_IN_RINT)
1933 CASE_MATHFN (BUILT_IN_ROUND)
1934 CASE_MATHFN (BUILT_IN_SCALB)
1935 CASE_MATHFN (BUILT_IN_SCALBLN)
1936 CASE_MATHFN (BUILT_IN_SCALBN)
1937 CASE_MATHFN (BUILT_IN_SIGNBIT)
1938 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1939 CASE_MATHFN (BUILT_IN_SIN)
1940 CASE_MATHFN (BUILT_IN_SINCOS)
1941 CASE_MATHFN (BUILT_IN_SINH)
1942 CASE_MATHFN (BUILT_IN_SQRT)
1943 CASE_MATHFN (BUILT_IN_TAN)
1944 CASE_MATHFN (BUILT_IN_TANH)
1945 CASE_MATHFN (BUILT_IN_TGAMMA)
1946 CASE_MATHFN (BUILT_IN_TRUNC)
1947 CASE_MATHFN (BUILT_IN_Y0)
1948 CASE_MATHFN (BUILT_IN_Y1)
1949 CASE_MATHFN (BUILT_IN_YN)
1950
1951 default:
1952 return NULL_TREE;
1953 }
1954
1955 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1956 fcode2 = fcode;
1957 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1958 fcode2 = fcodef;
1959 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1960 fcode2 = fcodel;
1961 else
1962 return NULL_TREE;
1963
1964 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1965 return NULL_TREE;
1966
1967 return builtin_decl_explicit (fcode2);
1968 }
1969
1970 /* Like mathfn_built_in_1(), but always use the implicit builtin declarations. */
1971
1972 tree
1973 mathfn_built_in (tree type, enum built_in_function fn)
1974 {
1975 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1976 }
1977
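/* For example (a sketch), folding code can ask for the float variant
   of a double builtin:

     tree fn = mathfn_built_in (float_type_node, BUILT_IN_SIN);

   which yields the decl for sinf when it is implicitly available,
   and NULL_TREE otherwise.  */
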
1978 /* If errno must be maintained, expand the RTL to check if the result,
1979 TARGET, of a built-in function call, EXP, is NaN, and if so set
1980 errno to EDOM. */
1981
1982 static void
1983 expand_errno_check (tree exp, rtx target)
1984 {
1985 rtx lab = gen_label_rtx ();
1986
1987 /* Test the result; if it is NaN, set errno=EDOM because
1988 the argument was not in the domain. */
1989 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1990 NULL_RTX, NULL_RTX, lab,
1991 /* The jump is very likely. */
1992 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
1993
1994 #ifdef TARGET_EDOM
1995 /* If this built-in doesn't throw an exception, set errno directly. */
1996 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1997 {
1998 #ifdef GEN_ERRNO_RTX
1999 rtx errno_rtx = GEN_ERRNO_RTX;
2000 #else
2001 rtx errno_rtx
2002 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
2003 #endif
2004 emit_move_insn (errno_rtx,
2005 gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
2006 emit_label (lab);
2007 return;
2008 }
2009 #endif
2010
2011 /* Make sure the library call isn't expanded as a tail call. */
2012 CALL_EXPR_TAILCALL (exp) = 0;
2013
2014 /* We can't set errno=EDOM directly; let the library call do it.
2015 Pop the arguments right away in case the call gets deleted. */
2016 NO_DEFER_POP;
2017 expand_call (exp, target, 0);
2018 OK_DEFER_POP;
2019 emit_label (lab);
2020 }
2021
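/* The code emitted above corresponds roughly to the following C, which
   relies on the fact that only a NaN compares unequal to itself
   (a sketch of the control flow, not literal source):

     if (result == result)
       goto done;
     errno = EDOM;
   done:;

   When the builtin may throw, the libcall is re-issued instead so the
   library sets errno itself.  */
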
2022 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
2023 Return NULL_RTX if a normal call should be emitted rather than expanding
2024 the function in-line. EXP is the expression that is a call to the builtin
2025 function; if convenient, the result should be placed in TARGET.
2026 SUBTARGET may be used as the target for computing one of EXP's operands. */
2027
2028 static rtx
2029 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
2030 {
2031 optab builtin_optab;
2032 rtx op0, insns;
2033 tree fndecl = get_callee_fndecl (exp);
2034 enum machine_mode mode;
2035 bool errno_set = false;
2036 bool try_widening = false;
2037 tree arg;
2038
2039 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2040 return NULL_RTX;
2041
2042 arg = CALL_EXPR_ARG (exp, 0);
2043
2044 switch (DECL_FUNCTION_CODE (fndecl))
2045 {
2046 CASE_FLT_FN (BUILT_IN_SQRT):
2047 errno_set = ! tree_expr_nonnegative_p (arg);
2048 try_widening = true;
2049 builtin_optab = sqrt_optab;
2050 break;
2051 CASE_FLT_FN (BUILT_IN_EXP):
2052 errno_set = true; builtin_optab = exp_optab; break;
2053 CASE_FLT_FN (BUILT_IN_EXP10):
2054 CASE_FLT_FN (BUILT_IN_POW10):
2055 errno_set = true; builtin_optab = exp10_optab; break;
2056 CASE_FLT_FN (BUILT_IN_EXP2):
2057 errno_set = true; builtin_optab = exp2_optab; break;
2058 CASE_FLT_FN (BUILT_IN_EXPM1):
2059 errno_set = true; builtin_optab = expm1_optab; break;
2060 CASE_FLT_FN (BUILT_IN_LOGB):
2061 errno_set = true; builtin_optab = logb_optab; break;
2062 CASE_FLT_FN (BUILT_IN_LOG):
2063 errno_set = true; builtin_optab = log_optab; break;
2064 CASE_FLT_FN (BUILT_IN_LOG10):
2065 errno_set = true; builtin_optab = log10_optab; break;
2066 CASE_FLT_FN (BUILT_IN_LOG2):
2067 errno_set = true; builtin_optab = log2_optab; break;
2068 CASE_FLT_FN (BUILT_IN_LOG1P):
2069 errno_set = true; builtin_optab = log1p_optab; break;
2070 CASE_FLT_FN (BUILT_IN_ASIN):
2071 builtin_optab = asin_optab; break;
2072 CASE_FLT_FN (BUILT_IN_ACOS):
2073 builtin_optab = acos_optab; break;
2074 CASE_FLT_FN (BUILT_IN_TAN):
2075 builtin_optab = tan_optab; break;
2076 CASE_FLT_FN (BUILT_IN_ATAN):
2077 builtin_optab = atan_optab; break;
2078 CASE_FLT_FN (BUILT_IN_FLOOR):
2079 builtin_optab = floor_optab; break;
2080 CASE_FLT_FN (BUILT_IN_CEIL):
2081 builtin_optab = ceil_optab; break;
2082 CASE_FLT_FN (BUILT_IN_TRUNC):
2083 builtin_optab = btrunc_optab; break;
2084 CASE_FLT_FN (BUILT_IN_ROUND):
2085 builtin_optab = round_optab; break;
2086 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2087 builtin_optab = nearbyint_optab;
2088 if (flag_trapping_math)
2089 break;
2090 /* Else fall through and expand as rint. */
2091 CASE_FLT_FN (BUILT_IN_RINT):
2092 builtin_optab = rint_optab; break;
2093 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2094 builtin_optab = significand_optab; break;
2095 default:
2096 gcc_unreachable ();
2097 }
2098
2099 /* Make a suitable register to place result in. */
2100 mode = TYPE_MODE (TREE_TYPE (exp));
2101
2102 if (! flag_errno_math || ! HONOR_NANS (mode))
2103 errno_set = false;
2104
2105 /* Before working hard, check whether the instruction is available, but try
2106 to widen the mode for specific operations. */
2107 if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2108 || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
2109 && (!errno_set || !optimize_insn_for_size_p ()))
2110 {
2111 rtx result = gen_reg_rtx (mode);
2112
2113 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2114 need to expand the argument again. This way, we will not perform
2115 the side-effects more than once. */
2116 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2117
2118 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2119
2120 start_sequence ();
2121
2122 /* Compute into RESULT.
2123 Set RESULT to wherever the result comes back. */
2124 result = expand_unop (mode, builtin_optab, op0, result, 0);
2125
2126 if (result != 0)
2127 {
2128 if (errno_set)
2129 expand_errno_check (exp, result);
2130
2131 /* Output the entire sequence. */
2132 insns = get_insns ();
2133 end_sequence ();
2134 emit_insn (insns);
2135 return result;
2136 }
2137
2138 /* If we were unable to expand via the builtin, stop the sequence
2139 (without outputting the insns) and call to the library function
2140 with the stabilized argument list. */
2141 end_sequence ();
2142 }
2143
2144 return expand_call (exp, target, target == const0_rtx);
2145 }
2146
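/* For instance (a sketch, not a statement about any particular
   target), with a sqrt insn available and -fno-math-errno,

     double y = __builtin_sqrt (x);

   expands to a single square-root instruction; with errno honored and
   X possibly negative, the EDOM check above is appended.  */
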
2147 /* Expand a call to one of the builtin binary math functions (pow, atan2, etc.).
2148 Return NULL_RTX if a normal call should be emitted rather than expanding the
2149 function in-line. EXP is the expression that is a call to the builtin
2150 function; if convenient, the result should be placed in TARGET.
2151 SUBTARGET may be used as the target for computing one of EXP's
2152 operands. */
2153
2154 static rtx
2155 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2156 {
2157 optab builtin_optab;
2158 rtx op0, op1, insns, result;
2159 int op1_type = REAL_TYPE;
2160 tree fndecl = get_callee_fndecl (exp);
2161 tree arg0, arg1;
2162 enum machine_mode mode;
2163 bool errno_set = true;
2164
2165 switch (DECL_FUNCTION_CODE (fndecl))
2166 {
2167 CASE_FLT_FN (BUILT_IN_SCALBN):
2168 CASE_FLT_FN (BUILT_IN_SCALBLN):
2169 CASE_FLT_FN (BUILT_IN_LDEXP):
2170 op1_type = INTEGER_TYPE; /* FALLTHRU */
2171 default:
2172 break;
2173 }
2174
2175 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2176 return NULL_RTX;
2177
2178 arg0 = CALL_EXPR_ARG (exp, 0);
2179 arg1 = CALL_EXPR_ARG (exp, 1);
2180
2181 switch (DECL_FUNCTION_CODE (fndecl))
2182 {
2183 CASE_FLT_FN (BUILT_IN_POW):
2184 builtin_optab = pow_optab; break;
2185 CASE_FLT_FN (BUILT_IN_ATAN2):
2186 builtin_optab = atan2_optab; break;
2187 CASE_FLT_FN (BUILT_IN_SCALB):
2188 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2189 return 0;
2190 builtin_optab = scalb_optab; break;
2191 CASE_FLT_FN (BUILT_IN_SCALBN):
2192 CASE_FLT_FN (BUILT_IN_SCALBLN):
2193 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2194 return 0;
2195 /* Fall through... */
2196 CASE_FLT_FN (BUILT_IN_LDEXP):
2197 builtin_optab = ldexp_optab; break;
2198 CASE_FLT_FN (BUILT_IN_FMOD):
2199 builtin_optab = fmod_optab; break;
2200 CASE_FLT_FN (BUILT_IN_REMAINDER):
2201 CASE_FLT_FN (BUILT_IN_DREM):
2202 builtin_optab = remainder_optab; break;
2203 default:
2204 gcc_unreachable ();
2205 }
2206
2207 /* Make a suitable register to place result in. */
2208 mode = TYPE_MODE (TREE_TYPE (exp));
2209
2210 /* Before working hard, check whether the instruction is available. */
2211 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2212 return NULL_RTX;
2213
2214 result = gen_reg_rtx (mode);
2215
2216 if (! flag_errno_math || ! HONOR_NANS (mode))
2217 errno_set = false;
2218
2219 if (errno_set && optimize_insn_for_size_p ())
2220 return 0;
2221
2222 /* Always stabilize the argument list. */
2223 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2224 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2225
2226 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2227 op1 = expand_normal (arg1);
2228
2229 start_sequence ();
2230
2231 /* Compute into RESULT.
2232 Set RESULT to wherever the result comes back. */
2233 result = expand_binop (mode, builtin_optab, op0, op1,
2234 result, 0, OPTAB_DIRECT);
2235
2236 /* If we were unable to expand via the builtin, stop the sequence
2237 (without outputting the insns) and call to the library function
2238 with the stabilized argument list. */
2239 if (result == 0)
2240 {
2241 end_sequence ();
2242 return expand_call (exp, target, target == const0_rtx);
2243 }
2244
2245 if (errno_set)
2246 expand_errno_check (exp, result);
2247
2248 /* Output the entire sequence. */
2249 insns = get_insns ();
2250 end_sequence ();
2251 emit_insn (insns);
2252
2253 return result;
2254 }
2255
2256 /* Expand a call to the builtin ternary math functions (fma).
2257 Return NULL_RTX if a normal call should be emitted rather than expanding the
2258 function in-line. EXP is the expression that is a call to the builtin
2259 function; if convenient, the result should be placed in TARGET.
2260 SUBTARGET may be used as the target for computing one of EXP's
2261 operands. */
2262
2263 static rtx
2264 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2265 {
2266 optab builtin_optab;
2267 rtx op0, op1, op2, insns, result;
2268 tree fndecl = get_callee_fndecl (exp);
2269 tree arg0, arg1, arg2;
2270 enum machine_mode mode;
2271
2272 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2273 return NULL_RTX;
2274
2275 arg0 = CALL_EXPR_ARG (exp, 0);
2276 arg1 = CALL_EXPR_ARG (exp, 1);
2277 arg2 = CALL_EXPR_ARG (exp, 2);
2278
2279 switch (DECL_FUNCTION_CODE (fndecl))
2280 {
2281 CASE_FLT_FN (BUILT_IN_FMA):
2282 builtin_optab = fma_optab; break;
2283 default:
2284 gcc_unreachable ();
2285 }
2286
2287 /* Make a suitable register to place result in. */
2288 mode = TYPE_MODE (TREE_TYPE (exp));
2289
2290 /* Before working hard, check whether the instruction is available. */
2291 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2292 return NULL_RTX;
2293
2294 result = gen_reg_rtx (mode);
2295
2296 /* Always stabilize the argument list. */
2297 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2298 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2299 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2300
2301 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2302 op1 = expand_normal (arg1);
2303 op2 = expand_normal (arg2);
2304
2305 start_sequence ();
2306
2307 /* Compute into RESULT.
2308 Set RESULT to wherever the result comes back. */
2309 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2310 result, 0);
2311
2312 /* If we were unable to expand via the builtin, stop the sequence
2313 (without outputting the insns) and call to the library function
2314 with the stabilized argument list. */
2315 if (result == 0)
2316 {
2317 end_sequence ();
2318 return expand_call (exp, target, target == const0_rtx);
2319 }
2320
2321 /* Output the entire sequence. */
2322 insns = get_insns ();
2323 end_sequence ();
2324 emit_insn (insns);
2325
2326 return result;
2327 }
2328
2329 /* Expand a call to the builtin sin and cos math functions.
2330 Return NULL_RTX if a normal call should be emitted rather than expanding the
2331 function in-line. EXP is the expression that is a call to the builtin
2332 function; if convenient, the result should be placed in TARGET.
2333 SUBTARGET may be used as the target for computing one of EXP's
2334 operands. */
2335
2336 static rtx
2337 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2338 {
2339 optab builtin_optab;
2340 rtx op0, insns;
2341 tree fndecl = get_callee_fndecl (exp);
2342 enum machine_mode mode;
2343 tree arg;
2344
2345 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2346 return NULL_RTX;
2347
2348 arg = CALL_EXPR_ARG (exp, 0);
2349
2350 switch (DECL_FUNCTION_CODE (fndecl))
2351 {
2352 CASE_FLT_FN (BUILT_IN_SIN):
2353 CASE_FLT_FN (BUILT_IN_COS):
2354 builtin_optab = sincos_optab; break;
2355 default:
2356 gcc_unreachable ();
2357 }
2358
2359 /* Make a suitable register to place result in. */
2360 mode = TYPE_MODE (TREE_TYPE (exp));
2361
2362 /* Check if sincos insn is available, otherwise fall back
2363 to the sin or cos insn. */
2364 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2365 switch (DECL_FUNCTION_CODE (fndecl))
2366 {
2367 CASE_FLT_FN (BUILT_IN_SIN):
2368 builtin_optab = sin_optab; break;
2369 CASE_FLT_FN (BUILT_IN_COS):
2370 builtin_optab = cos_optab; break;
2371 default:
2372 gcc_unreachable ();
2373 }
2374
2375 /* Before working hard, check whether the instruction is available. */
2376 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2377 {
2378 rtx result = gen_reg_rtx (mode);
2379
2380 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2381 need to expand the argument again. This way, we will not perform
2382 the side-effects more than once. */
2383 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2384
2385 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2386
2387 start_sequence ();
2388
2389 /* Compute into RESULT.
2390 Set RESULT to wherever the result comes back. */
2391 if (builtin_optab == sincos_optab)
2392 {
2393 int ok;
2394
2395 switch (DECL_FUNCTION_CODE (fndecl))
2396 {
2397 CASE_FLT_FN (BUILT_IN_SIN):
2398 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2399 break;
2400 CASE_FLT_FN (BUILT_IN_COS):
2401 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2402 break;
2403 default:
2404 gcc_unreachable ();
2405 }
2406 gcc_assert (ok);
2407 }
2408 else
2409 result = expand_unop (mode, builtin_optab, op0, result, 0);
2410
2411 if (result != 0)
2412 {
2413 /* Output the entire sequence. */
2414 insns = get_insns ();
2415 end_sequence ();
2416 emit_insn (insns);
2417 return result;
2418 }
2419
2420 /* If we were unable to expand via the builtin, stop the sequence
2421 (without outputting the insns) and call to the library function
2422 with the stabilized argument list. */
2423 end_sequence ();
2424 }
2425
2426 return expand_call (exp, target, target == const0_rtx);
2427 }
2428
2429 /* Given an interclass math builtin decl FNDECL and its argument ARG
2430 return an RTL instruction code that implements the functionality.
2431 If that isn't possible or available return CODE_FOR_nothing. */
2432
2433 static enum insn_code
2434 interclass_mathfn_icode (tree arg, tree fndecl)
2435 {
2436 bool errno_set = false;
2437 optab builtin_optab = unknown_optab;
2438 enum machine_mode mode;
2439
2440 switch (DECL_FUNCTION_CODE (fndecl))
2441 {
2442 CASE_FLT_FN (BUILT_IN_ILOGB):
2443 errno_set = true; builtin_optab = ilogb_optab; break;
2444 CASE_FLT_FN (BUILT_IN_ISINF):
2445 builtin_optab = isinf_optab; break;
2446 case BUILT_IN_ISNORMAL:
2447 case BUILT_IN_ISFINITE:
2448 CASE_FLT_FN (BUILT_IN_FINITE):
2449 case BUILT_IN_FINITED32:
2450 case BUILT_IN_FINITED64:
2451 case BUILT_IN_FINITED128:
2452 case BUILT_IN_ISINFD32:
2453 case BUILT_IN_ISINFD64:
2454 case BUILT_IN_ISINFD128:
2455 /* These builtins have no optabs (yet). */
2456 break;
2457 default:
2458 gcc_unreachable ();
2459 }
2460
2461 /* There's no easy way to detect the case we need to set EDOM. */
2462 if (flag_errno_math && errno_set)
2463 return CODE_FOR_nothing;
2464
2465 /* Optab mode depends on the mode of the input argument. */
2466 mode = TYPE_MODE (TREE_TYPE (arg));
2467
2468 if (builtin_optab)
2469 return optab_handler (builtin_optab, mode);
2470 return CODE_FOR_nothing;
2471 }
2472
2473 /* Expand a call to one of the builtin math functions that operate on
2474 floating point argument and output an integer result (ilogb, isinf,
2475 isnan, etc).
2476 Return 0 if a normal call should be emitted rather than expanding the
2477 function in-line. EXP is the expression that is a call to the builtin
2478 function; if convenient, the result should be placed in TARGET. */
2479
2480 static rtx
2481 expand_builtin_interclass_mathfn (tree exp, rtx target)
2482 {
2483 enum insn_code icode = CODE_FOR_nothing;
2484 rtx op0;
2485 tree fndecl = get_callee_fndecl (exp);
2486 enum machine_mode mode;
2487 tree arg;
2488
2489 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2490 return NULL_RTX;
2491
2492 arg = CALL_EXPR_ARG (exp, 0);
2493 icode = interclass_mathfn_icode (arg, fndecl);
2494 mode = TYPE_MODE (TREE_TYPE (arg));
2495
2496 if (icode != CODE_FOR_nothing)
2497 {
2498 struct expand_operand ops[1];
2499 rtx last = get_last_insn ();
2500 tree orig_arg = arg;
2501
2502 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2503 need to expand the argument again. This way, we will not perform
2504 the side-effects more than once. */
2505 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2506
2507 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2508
2509 if (mode != GET_MODE (op0))
2510 op0 = convert_to_mode (mode, op0, 0);
2511
2512 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2513 if (maybe_legitimize_operands (icode, 0, 1, ops)
2514 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2515 return ops[0].value;
2516
2517 delete_insns_since (last);
2518 CALL_EXPR_ARG (exp, 0) = orig_arg;
2519 }
2520
2521 return NULL_RTX;
2522 }
2523
2524 /* Expand a call to the builtin sincos math function.
2525 Return NULL_RTX if a normal call should be emitted rather than expanding the
2526 function in-line. EXP is the expression that is a call to the builtin
2527 function. */
2528
2529 static rtx
2530 expand_builtin_sincos (tree exp)
2531 {
2532 rtx op0, op1, op2, target1, target2;
2533 enum machine_mode mode;
2534 tree arg, sinp, cosp;
2535 int result;
2536 location_t loc = EXPR_LOCATION (exp);
2537 tree alias_type, alias_off;
2538
2539 if (!validate_arglist (exp, REAL_TYPE,
2540 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2541 return NULL_RTX;
2542
2543 arg = CALL_EXPR_ARG (exp, 0);
2544 sinp = CALL_EXPR_ARG (exp, 1);
2545 cosp = CALL_EXPR_ARG (exp, 2);
2546
2547 /* Make a suitable register to place result in. */
2548 mode = TYPE_MODE (TREE_TYPE (arg));
2549
2550 /* Check if sincos insn is available, otherwise emit the call. */
2551 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2552 return NULL_RTX;
2553
2554 target1 = gen_reg_rtx (mode);
2555 target2 = gen_reg_rtx (mode);
2556
2557 op0 = expand_normal (arg);
2558 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2559 alias_off = build_int_cst (alias_type, 0);
2560 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2561 sinp, alias_off));
2562 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2563 cosp, alias_off));
2564
2565 /* Compute into target1 and target2.
2566 Set TARGET to wherever the result comes back. */
2567 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2568 gcc_assert (result);
2569
2570 /* Move target1 and target2 to the memory locations indicated
2571 by op1 and op2. */
2572 emit_move_insn (op1, target1);
2573 emit_move_insn (op2, target2);
2574
2575 return const0_rtx;
2576 }
2577
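/* Illustrative effect (a sketch): with a sincos insn available,

     sincos (x, &s, &c);

   becomes one two-output instruction plus two stores, instead of
   separate sin and cos libcalls.  */
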
2578 /* Expand a call to the internal cexpi builtin to the sincos math function.
2579 EXP is the expression that is a call to the builtin function; if convenient,
2580 the result should be placed in TARGET. */
2581
2582 static rtx
2583 expand_builtin_cexpi (tree exp, rtx target)
2584 {
2585 tree fndecl = get_callee_fndecl (exp);
2586 tree arg, type;
2587 enum machine_mode mode;
2588 rtx op0, op1, op2;
2589 location_t loc = EXPR_LOCATION (exp);
2590
2591 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2592 return NULL_RTX;
2593
2594 arg = CALL_EXPR_ARG (exp, 0);
2595 type = TREE_TYPE (arg);
2596 mode = TYPE_MODE (TREE_TYPE (arg));
2597
2598 /* Try expanding via a sincos optab; fall back to emitting a libcall
2599 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2600 is only generated from sincos or cexp, or when either of them is available. */
2601 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2602 {
2603 op1 = gen_reg_rtx (mode);
2604 op2 = gen_reg_rtx (mode);
2605
2606 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2607
2608 /* Compute into op1 and op2. */
2609 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2610 }
2611 else if (targetm.libc_has_function (function_sincos))
2612 {
2613 tree call, fn = NULL_TREE;
2614 tree top1, top2;
2615 rtx op1a, op2a;
2616
2617 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2618 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2619 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2620 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2621 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2622 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2623 else
2624 gcc_unreachable ();
2625
2626 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2627 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2628 op1a = copy_addr_to_reg (XEXP (op1, 0));
2629 op2a = copy_addr_to_reg (XEXP (op2, 0));
2630 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2631 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2632
2633 /* Make sure not to fold the sincos call again. */
2634 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2635 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2636 call, 3, arg, top1, top2));
2637 }
2638 else
2639 {
2640 tree call, fn = NULL_TREE, narg;
2641 tree ctype = build_complex_type (type);
2642
2643 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2644 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2645 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2646 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2647 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2648 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2649 else
2650 gcc_unreachable ();
2651
2652 /* If we don't have a decl for cexp, create one. This is the
2653 friendliest fallback if the user calls __builtin_cexpi on a
2654 target without full C99 function support. */
2655 if (fn == NULL_TREE)
2656 {
2657 tree fntype;
2658 const char *name = NULL;
2659
2660 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2661 name = "cexpf";
2662 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2663 name = "cexp";
2664 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2665 name = "cexpl";
2666
2667 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2668 fn = build_fn_decl (name, fntype);
2669 }
2670
2671 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2672 build_real (type, dconst0), arg);
2673
2674 /* Make sure not to fold the cexp call again. */
2675 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2676 return expand_expr (build_call_nary (ctype, call, 1, narg),
2677 target, VOIDmode, EXPAND_NORMAL);
2678 }
2679
2680 /* Now build the proper return type. */
2681 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2682 make_tree (TREE_TYPE (arg), op2),
2683 make_tree (TREE_TYPE (arg), op1)),
2684 target, VOIDmode, EXPAND_NORMAL);
2685 }
2686
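/* Illustrative fallback (a sketch): without sincos support, the
   internal __builtin_cexpi (x) is emitted as the equivalent of

     cexp (0.0 + x * I);

   built above via COMPLEX_EXPR; the real and imaginary parts of the
   result are cos (x) and sin (x).  */
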
2687 /* Conveniently construct a function call expression. FNDECL names the
2688 function to be called, N is the number of arguments, and the "..."
2689 parameters are the argument expressions. Unlike build_call_expr
2690 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2691
2692 static tree
2693 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2694 {
2695 va_list ap;
2696 tree fntype = TREE_TYPE (fndecl);
2697 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2698
2699 va_start (ap, n);
2700 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2701 va_end (ap);
2702 SET_EXPR_LOCATION (fn, loc);
2703 return fn;
2704 }
2705
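/* For example (a sketch), the lowering code later in this file uses

     exp = build_call_nofold_loc (loc, fallback_fndecl, 1, arg);

   to build a plain CALL_EXPR that will not be folded back into the
   very builtin being expanded.  */
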
2706 /* Expand a call to one of the builtin rounding functions gcc defines
2707 as an extension (lfloor and lceil). As these are gcc extensions we
2708 do not need to worry about setting errno to EDOM.
2709 If expanding via optab fails, lower expression to (int)(floor(x)).
2710 EXP is the expression that is a call to the builtin function;
2711 if convenient, the result should be placed in TARGET. */
2712
2713 static rtx
2714 expand_builtin_int_roundingfn (tree exp, rtx target)
2715 {
2716 convert_optab builtin_optab;
2717 rtx op0, insns, tmp;
2718 tree fndecl = get_callee_fndecl (exp);
2719 enum built_in_function fallback_fn;
2720 tree fallback_fndecl;
2721 enum machine_mode mode;
2722 tree arg;
2723
2724 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2725 gcc_unreachable ();
2726
2727 arg = CALL_EXPR_ARG (exp, 0);
2728
2729 switch (DECL_FUNCTION_CODE (fndecl))
2730 {
2731 CASE_FLT_FN (BUILT_IN_ICEIL):
2732 CASE_FLT_FN (BUILT_IN_LCEIL):
2733 CASE_FLT_FN (BUILT_IN_LLCEIL):
2734 builtin_optab = lceil_optab;
2735 fallback_fn = BUILT_IN_CEIL;
2736 break;
2737
2738 CASE_FLT_FN (BUILT_IN_IFLOOR):
2739 CASE_FLT_FN (BUILT_IN_LFLOOR):
2740 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2741 builtin_optab = lfloor_optab;
2742 fallback_fn = BUILT_IN_FLOOR;
2743 break;
2744
2745 default:
2746 gcc_unreachable ();
2747 }
2748
2749 /* Make a suitable register to place result in. */
2750 mode = TYPE_MODE (TREE_TYPE (exp));
2751
2752 target = gen_reg_rtx (mode);
2753
2754 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2755 need to expand the argument again. This way, we will not perform
2756 the side-effects more than once. */
2757 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2758
2759 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2760
2761 start_sequence ();
2762
2763 /* Compute into TARGET. */
2764 if (expand_sfix_optab (target, op0, builtin_optab))
2765 {
2766 /* Output the entire sequence. */
2767 insns = get_insns ();
2768 end_sequence ();
2769 emit_insn (insns);
2770 return target;
2771 }
2772
2773 /* If we were unable to expand via the builtin, stop the sequence
2774 (without outputting the insns). */
2775 end_sequence ();
2776
2777 /* Fall back to the floating-point rounding function. */
2778 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2779
2780 /* For non-C99 targets we may end up without a fallback fndecl here
2781 if the user called __builtin_lfloor directly. In this case emit
2782 a call to the floor/ceil variants nevertheless. This should result
2783 in the best user experience for targets lacking full C99 support. */
2784 if (fallback_fndecl == NULL_TREE)
2785 {
2786 tree fntype;
2787 const char *name = NULL;
2788
2789 switch (DECL_FUNCTION_CODE (fndecl))
2790 {
2791 case BUILT_IN_ICEIL:
2792 case BUILT_IN_LCEIL:
2793 case BUILT_IN_LLCEIL:
2794 name = "ceil";
2795 break;
2796 case BUILT_IN_ICEILF:
2797 case BUILT_IN_LCEILF:
2798 case BUILT_IN_LLCEILF:
2799 name = "ceilf";
2800 break;
2801 case BUILT_IN_ICEILL:
2802 case BUILT_IN_LCEILL:
2803 case BUILT_IN_LLCEILL:
2804 name = "ceill";
2805 break;
2806 case BUILT_IN_IFLOOR:
2807 case BUILT_IN_LFLOOR:
2808 case BUILT_IN_LLFLOOR:
2809 name = "floor";
2810 break;
2811 case BUILT_IN_IFLOORF:
2812 case BUILT_IN_LFLOORF:
2813 case BUILT_IN_LLFLOORF:
2814 name = "floorf";
2815 break;
2816 case BUILT_IN_IFLOORL:
2817 case BUILT_IN_LFLOORL:
2818 case BUILT_IN_LLFLOORL:
2819 name = "floorl";
2820 break;
2821 default:
2822 gcc_unreachable ();
2823 }
2824
2825 fntype = build_function_type_list (TREE_TYPE (arg),
2826 TREE_TYPE (arg), NULL_TREE);
2827 fallback_fndecl = build_fn_decl (name, fntype);
2828 }
2829
2830 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2831
2832 tmp = expand_normal (exp);
2833 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2834
2835 /* Truncate the result of floating point optab to integer
2836 via expand_fix (). */
2837 target = gen_reg_rtx (mode);
2838 expand_fix (target, tmp, 0);
2839
2840 return target;
2841 }
2842
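/* The overall lowering (a sketch): when no lfloor insn is available,

     long l = __builtin_lfloor (x);

   is emitted as the equivalent of

     long l = (long) floor (x);

   with the final conversion performed by expand_fix above.  */
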
2843 /* Expand a call to one of the builtin math functions doing integer
2844 conversion (lrint).
2845 Return 0 if a normal call should be emitted rather than expanding the
2846 function in-line. EXP is the expression that is a call to the builtin
2847 function; if convenient, the result should be placed in TARGET. */
2848
2849 static rtx
2850 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2851 {
2852 convert_optab builtin_optab;
2853 rtx op0, insns;
2854 tree fndecl = get_callee_fndecl (exp);
2855 tree arg;
2856 enum machine_mode mode;
2857 enum built_in_function fallback_fn = BUILT_IN_NONE;
2858
2859 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2860 gcc_unreachable ();
2861
2862 arg = CALL_EXPR_ARG (exp, 0);
2863
2864 switch (DECL_FUNCTION_CODE (fndecl))
2865 {
2866 CASE_FLT_FN (BUILT_IN_IRINT):
2867 fallback_fn = BUILT_IN_LRINT;
2868 /* FALLTHRU */
2869 CASE_FLT_FN (BUILT_IN_LRINT):
2870 CASE_FLT_FN (BUILT_IN_LLRINT):
2871 builtin_optab = lrint_optab;
2872 break;
2873
2874 CASE_FLT_FN (BUILT_IN_IROUND):
2875 fallback_fn = BUILT_IN_LROUND;
2876 /* FALLTHRU */
2877 CASE_FLT_FN (BUILT_IN_LROUND):
2878 CASE_FLT_FN (BUILT_IN_LLROUND):
2879 builtin_optab = lround_optab;
2880 break;
2881
2882 default:
2883 gcc_unreachable ();
2884 }
2885
2886 /* There's no easy way to detect the case we need to set EDOM. */
2887 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2888 return NULL_RTX;
2889
2890 /* Make a suitable register to place result in. */
2891 mode = TYPE_MODE (TREE_TYPE (exp));
2892
2893 /* If errno does not need to be maintained, try to expand inline. */
2894 if (!flag_errno_math)
2895 {
2896 rtx result = gen_reg_rtx (mode);
2897
2898 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2899 need to expand the argument again. This way, we will not perform
2900 the side-effects more than once. */
2901 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2902
2903 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2904
2905 start_sequence ();
2906
2907 if (expand_sfix_optab (result, op0, builtin_optab))
2908 {
2909 /* Output the entire sequence. */
2910 insns = get_insns ();
2911 end_sequence ();
2912 emit_insn (insns);
2913 return result;
2914 }
2915
2916 /* If we were unable to expand via the builtin, stop the sequence
2917 (without outputting the insns) and call to the library function
2918 with the stabilized argument list. */
2919 end_sequence ();
2920 }
2921
2922 if (fallback_fn != BUILT_IN_NONE)
2923 {
2924 /* Fall back to rounding to long int. Use implicit_p 0: for non-C99
2925 targets, (int) round (x) should never be transformed into
2926 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2927 a call to lround in the hope that the target provides at least some
2928 C99 functions. This should result in the best user experience for
2929 targets lacking full C99 support. */
2930 tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
2931 fallback_fn, 0);
2932
2933 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2934 fallback_fndecl, 1, arg);
2935
2936 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2937 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2938 return convert_to_mode (mode, target, 0);
2939 }
2940
2941 return expand_call (exp, target, target == const0_rtx);
2942 }
2943
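/* The fallback path above means that, for example (a sketch),

     int i = __builtin_iround (x);

   is emitted as the equivalent of

     int i = (int) lround (x);

   when no suitable lround insn is available.  */
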
2944 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2945 a normal call should be emitted rather than expanding the function
2946 in-line. EXP is the expression that is a call to the builtin
2947 function; if convenient, the result should be placed in TARGET. */
2948
2949 static rtx
2950 expand_builtin_powi (tree exp, rtx target)
2951 {
2952 tree arg0, arg1;
2953 rtx op0, op1;
2954 enum machine_mode mode;
2955 enum machine_mode mode2;
2956
2957 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2958 return NULL_RTX;
2959
2960 arg0 = CALL_EXPR_ARG (exp, 0);
2961 arg1 = CALL_EXPR_ARG (exp, 1);
2962 mode = TYPE_MODE (TREE_TYPE (exp));
2963
2964 /* Emit a libcall to libgcc. */
2965
2966 /* Mode of the 2nd argument must match that of an int. */
2967 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2968
2969 if (target == NULL_RTX)
2970 target = gen_reg_rtx (mode);
2971
2972 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2973 if (GET_MODE (op0) != mode)
2974 op0 = convert_to_mode (mode, op0, 0);
2975 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2976 if (GET_MODE (op1) != mode2)
2977 op1 = convert_to_mode (mode2, op1, 0);
2978
2979 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2980 target, LCT_CONST, mode, 2,
2981 op0, mode, op1, mode2);
2982
2983 return target;
2984 }
2985
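/* The libcall emitted above resolves to libgcc's __powi helpers; e.g.
   (a sketch) for DFmode,

     double r = __builtin_powi (x, n);

   becomes the equivalent of

     double r = __powidf2 (x, n);  */
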
2986 /* Expand expression EXP which is a call to the strlen builtin. Return
2987 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
2988 try to get the result in TARGET, if convenient. */
2989
2990 static rtx
2991 expand_builtin_strlen (tree exp, rtx target,
2992 enum machine_mode target_mode)
2993 {
2994 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2995 return NULL_RTX;
2996 else
2997 {
2998 struct expand_operand ops[4];
2999 rtx pat;
3000 tree len;
3001 tree src = CALL_EXPR_ARG (exp, 0);
3002 rtx src_reg, before_strlen;
3003 enum machine_mode insn_mode = target_mode;
3004 enum insn_code icode = CODE_FOR_nothing;
3005 unsigned int align;
3006
3007 /* If the length can be computed at compile-time, return it. */
3008 len = c_strlen (src, 0);
3009 if (len)
3010 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3011
3012 /* If the length can be computed at compile-time and is a constant
3013 integer, but there are side-effects in src, evaluate
3014 src for side-effects, then return len.
3015 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3016 can be optimized into: i++; x = 3; */
3017 len = c_strlen (src, 1);
3018 if (len && TREE_CODE (len) == INTEGER_CST)
3019 {
3020 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3021 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3022 }
3023
3024 align = get_pointer_alignment (src) / BITS_PER_UNIT;
3025
3026 /* If SRC is not a pointer type, don't do this operation inline. */
3027 if (align == 0)
3028 return NULL_RTX;
3029
3030 /* Bail out if we can't compute strlen in the right mode. */
3031 while (insn_mode != VOIDmode)
3032 {
3033 icode = optab_handler (strlen_optab, insn_mode);
3034 if (icode != CODE_FOR_nothing)
3035 break;
3036
3037 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3038 }
3039 if (insn_mode == VOIDmode)
3040 return NULL_RTX;
3041
3042 /* Make a place to hold the source address. We will not expand
3043 the actual source until we are sure that the expansion will
3044 not fail -- there are trees that cannot be expanded twice. */
3045 src_reg = gen_reg_rtx (Pmode);
3046
3047 /* Mark the beginning of the strlen sequence so we can emit the
3048 source operand later. */
3049 before_strlen = get_last_insn ();
3050
3051 create_output_operand (&ops[0], target, insn_mode);
3052 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3053 create_integer_operand (&ops[2], 0);
3054 create_integer_operand (&ops[3], align);
3055 if (!maybe_expand_insn (icode, 4, ops))
3056 return NULL_RTX;
3057
3058 /* Now that we are assured of success, expand the source. */
3059 start_sequence ();
3060 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3061 if (pat != src_reg)
3062 {
3063 #ifdef POINTERS_EXTEND_UNSIGNED
3064 if (GET_MODE (pat) != Pmode)
3065 pat = convert_to_mode (Pmode, pat,
3066 POINTERS_EXTEND_UNSIGNED);
3067 #endif
3068 emit_move_insn (src_reg, pat);
3069 }
3070 pat = get_insns ();
3071 end_sequence ();
3072
3073 if (before_strlen)
3074 emit_insn_after (pat, before_strlen);
3075 else
3076 emit_insn_before (pat, get_insns ());
3077
3078 /* Return the value in the proper mode for this function. */
3079 if (GET_MODE (ops[0].value) == target_mode)
3080 target = ops[0].value;
3081 else if (target != 0)
3082 convert_move (target, ops[0].value, 0);
3083 else
3084 target = convert_to_mode (target_mode, ops[0].value, 0);
3085
3086 return target;
3087 }
3088 }
3089
3090 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3091 bytes from constant string DATA + OFFSET and return it as target
3092 constant. */
3093
3094 static rtx
3095 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3096 enum machine_mode mode)
3097 {
3098 const char *str = (const char *) data;
3099
3100 gcc_assert (offset >= 0
3101 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3102 <= strlen (str) + 1));
3103
3104 return c_readstr (str + offset, mode);
3105 }
3106
3107 /* LEN specifies the length of the block of the memcpy/memset operation.
3108 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3109 In some cases we can make a very likely guess about the maximal size,
3110 which we then store into PROBABLE_MAX_SIZE. */
3111
3112 static void
3113 determine_block_size (tree len, rtx len_rtx,
3114 unsigned HOST_WIDE_INT *min_size,
3115 unsigned HOST_WIDE_INT *max_size,
3116 unsigned HOST_WIDE_INT *probable_max_size)
3117 {
3118 if (CONST_INT_P (len_rtx))
3119 {
3120 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3121 return;
3122 }
3123 else
3124 {
3125 double_int min, max;
3126 enum value_range_type range_type = VR_UNDEFINED;
3127
3128 /* Determine bounds from the type. */
3129 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3130 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3131 else
3132 *min_size = 0;
3133 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3134 *probable_max_size = *max_size
3135 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3136 else
3137 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3138
3139 if (TREE_CODE (len) == SSA_NAME)
3140 range_type = get_range_info (len, &min, &max);
3141 if (range_type == VR_RANGE)
3142 {
3143 if (min.fits_uhwi () && *min_size < min.to_uhwi ())
3144 *min_size = min.to_uhwi ();
3145 if (max.fits_uhwi () && *max_size > max.to_uhwi ())
3146 *probable_max_size = *max_size = max.to_uhwi ();
3147 }
3148 else if (range_type == VR_ANTI_RANGE)
3149 {
3150 /* An anti-range 0...N lets us determine that the minimal size is N+1. */
3151 if (min.is_zero ())
3152 {
3153 if ((max + double_int_one).fits_uhwi ())
3154 *min_size = (max + double_int_one).to_uhwi ();
3155 }
3156 /* Code like
3157
3158 int n;
3159 if (n < 100)
3160 memcpy (a, b, n)
3161
3162 produces an anti-range allowing negative values of N. We can
3163 still use this information to guess that N is not negative.
3164 */
3165 else if (!max.ule (double_int_one.lshift (30))
3166 && min.fits_uhwi ())
3167 *probable_max_size = min.to_uhwi () - 1;
3168 }
3169 }
3170 gcc_checking_assert (*max_size <=
3171 (unsigned HOST_WIDE_INT)
3172 GET_MODE_MASK (GET_MODE (len_rtx)));
3173 }
3174
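/* For example (a sketch), if value range propagation knows that LEN
   is an SSA name with range [8, 64], the VR_RANGE case above yields

     *min_size = 8;  *max_size = *probable_max_size = 64;

   letting the block-move expander choose a better strategy.  */
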
3175 /* Expand a call EXP to the memcpy builtin.
3176 Return NULL_RTX if we failed; the caller should emit a normal call,
3177 otherwise try to get the result in TARGET, if convenient (and in
3178 mode MODE if that's convenient). */
3179
3180 static rtx
3181 expand_builtin_memcpy (tree exp, rtx target)
3182 {
3183 if (!validate_arglist (exp,
3184 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3185 return NULL_RTX;
3186 else
3187 {
3188 tree dest = CALL_EXPR_ARG (exp, 0);
3189 tree src = CALL_EXPR_ARG (exp, 1);
3190 tree len = CALL_EXPR_ARG (exp, 2);
3191 const char *src_str;
3192 unsigned int src_align = get_pointer_alignment (src);
3193 unsigned int dest_align = get_pointer_alignment (dest);
3194 rtx dest_mem, src_mem, dest_addr, len_rtx;
3195 HOST_WIDE_INT expected_size = -1;
3196 unsigned int expected_align = 0;
3197 unsigned HOST_WIDE_INT min_size;
3198 unsigned HOST_WIDE_INT max_size;
3199 unsigned HOST_WIDE_INT probable_max_size;
3200
3201 /* If DEST is not a pointer type, call the normal function. */
3202 if (dest_align == 0)
3203 return NULL_RTX;
3204
3205 /* If SRC is not a pointer type, don't do this
3206 operation in-line. */
3207 if (src_align == 0)
3208 return NULL_RTX;
3209
3210 if (currently_expanding_gimple_stmt)
3211 stringop_block_profile (currently_expanding_gimple_stmt,
3212 &expected_align, &expected_size);
3213
3214 if (expected_align < dest_align)
3215 expected_align = dest_align;
3216 dest_mem = get_memory_rtx (dest, len);
3217 set_mem_align (dest_mem, dest_align);
3218 len_rtx = expand_normal (len);
3219 determine_block_size (len, len_rtx, &min_size, &max_size,
3220 &probable_max_size);
3221 src_str = c_getstr (src);
3222
3223 /* If SRC is a string constant and block move would be done
3224 by pieces, we can avoid loading the string from memory
3225 and simply store the computed constants. */
3226 if (src_str
3227 && CONST_INT_P (len_rtx)
3228 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3229 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3230 CONST_CAST (char *, src_str),
3231 dest_align, false))
3232 {
3233 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3234 builtin_memcpy_read_str,
3235 CONST_CAST (char *, src_str),
3236 dest_align, false, 0);
3237 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3238 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3239 return dest_mem;
3240 }
3241
3242 src_mem = get_memory_rtx (src, len);
3243 set_mem_align (src_mem, src_align);
3244
3245 /* Copy word part most expediently. */
3246 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3247 CALL_EXPR_TAILCALL (exp)
3248 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3249 expected_align, expected_size,
3250 min_size, max_size, probable_max_size);
3251
3252 if (dest_addr == 0)
3253 {
3254 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3255 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3256 }
3257 return dest_addr;
3258 }
3259 }
3260
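/* Illustrative effect of the store-by-pieces path above (a sketch):

     char buf[4];
     memcpy (buf, "abc", 4);

   can be emitted as a single 4-byte store of the constant bytes,
   avoiding a load from the string literal followed by a copy.  */
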
3261 /* Expand a call EXP to the mempcpy builtin.
3262 Return NULL_RTX if we failed; the caller should emit a normal call,
3263 otherwise try to get the result in TARGET, if convenient (and in
3264 mode MODE if that's convenient). If ENDP is 0 return the
3265 destination pointer, if ENDP is 1 return the end pointer ala
3266 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3267 stpcpy. */
3268
3269 static rtx
3270 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3271 {
3272 if (!validate_arglist (exp,
3273 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3274 return NULL_RTX;
3275 else
3276 {
3277 tree dest = CALL_EXPR_ARG (exp, 0);
3278 tree src = CALL_EXPR_ARG (exp, 1);
3279 tree len = CALL_EXPR_ARG (exp, 2);
3280 return expand_builtin_mempcpy_args (dest, src, len,
3281 target, mode, /*endp=*/ 1);
3282 }
3283 }
3284
3285 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3286 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3287 so that this can also be called without constructing an actual CALL_EXPR.
3288 The other arguments and return value are the same as for
3289 expand_builtin_mempcpy. */
3290
3291 static rtx
3292 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3293 rtx target, enum machine_mode mode, int endp)
3294 {
3295 /* If the return value is ignored, transform mempcpy into memcpy. */
3296 if (target == const0_rtx && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3297 {
3298 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3299 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3300 dest, src, len);
3301 return expand_expr (result, target, mode, EXPAND_NORMAL);
3302 }
3303 else
3304 {
3305 const char *src_str;
3306 unsigned int src_align = get_pointer_alignment (src);
3307 unsigned int dest_align = get_pointer_alignment (dest);
3308 rtx dest_mem, src_mem, len_rtx;
3309
3310 /* If either SRC or DEST is not a pointer type, don't do this
3311 operation in-line. */
3312 if (dest_align == 0 || src_align == 0)
3313 return NULL_RTX;
3314
3315 /* If LEN is not constant, call the normal function. */
3316 if (! tree_fits_uhwi_p (len))
3317 return NULL_RTX;
3318
3319 len_rtx = expand_normal (len);
3320 src_str = c_getstr (src);
3321
3322 /* If SRC is a string constant and block move would be done
3323 by pieces, we can avoid loading the string from memory
3324 and only store the computed constants. */
3325 if (src_str
3326 && CONST_INT_P (len_rtx)
3327 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3328 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3329 CONST_CAST (char *, src_str),
3330 dest_align, false))
3331 {
3332 dest_mem = get_memory_rtx (dest, len);
3333 set_mem_align (dest_mem, dest_align);
3334 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3335 builtin_memcpy_read_str,
3336 CONST_CAST (char *, src_str),
3337 dest_align, false, endp);
3338 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3339 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3340 return dest_mem;
3341 }
3342
3343 if (CONST_INT_P (len_rtx)
3344 && can_move_by_pieces (INTVAL (len_rtx),
3345 MIN (dest_align, src_align)))
3346 {
3347 dest_mem = get_memory_rtx (dest, len);
3348 set_mem_align (dest_mem, dest_align);
3349 src_mem = get_memory_rtx (src, len);
3350 set_mem_align (src_mem, src_align);
3351 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3352 MIN (dest_align, src_align), endp);
3353 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3354 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3355 return dest_mem;
3356 }
3357
3358 return NULL_RTX;
3359 }
3360 }
3361
3362 #ifndef HAVE_movstr
3363 # define HAVE_movstr 0
3364 # define CODE_FOR_movstr CODE_FOR_nothing
3365 #endif
3366
3367 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3368 we failed; the caller should emit a normal call. Otherwise try to
3369 get the result in TARGET, if convenient. If ENDP is 0 return the
3370 destination pointer, if ENDP is 1 return the end pointer ala
3371 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3372 stpcpy. */
3373
3374 static rtx
3375 expand_movstr (tree dest, tree src, rtx target, int endp)
3376 {
3377 struct expand_operand ops[3];
3378 rtx dest_mem;
3379 rtx src_mem;
3380
3381 if (!HAVE_movstr)
3382 return NULL_RTX;
3383
3384 dest_mem = get_memory_rtx (dest, NULL);
3385 src_mem = get_memory_rtx (src, NULL);
3386 if (!endp)
3387 {
3388 target = force_reg (Pmode, XEXP (dest_mem, 0));
3389 dest_mem = replace_equiv_address (dest_mem, target);
3390 }
3391
3392 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3393 create_fixed_operand (&ops[1], dest_mem);
3394 create_fixed_operand (&ops[2], src_mem);
3395 if (!maybe_expand_insn (CODE_FOR_movstr, 3, ops))
3396 return NULL_RTX;
3397
3398 if (endp && target != const0_rtx)
3399 {
3400 target = ops[0].value;
3401 /* movstr is supposed to set end to the address of the NUL
3402 terminator. If the caller requested a mempcpy-like return value,
3403 adjust it. */
3404 if (endp == 1)
3405 {
3406 rtx tem = plus_constant (GET_MODE (target),
3407 gen_lowpart (GET_MODE (target), target), 1);
3408 emit_move_insn (target, force_operand (tem, NULL_RTX));
3409 }
3410 }
3411 return target;
3412 }
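
/* For example (DST hypothetical): after movstr copies "abc" to DST, the
pattern leaves the end pointer at &DST[3], the NUL terminator. That
already matches ENDP == 2 (stpcpy), so only the ENDP == 1 (mempcpy)
case needs the +1 adjustment performed above. */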
3413
3414 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3415 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3416 try to get the result in TARGET, if convenient (and in mode MODE if that's
3417 convenient). */
3418
3419 static rtx
3420 expand_builtin_strcpy (tree exp, rtx target)
3421 {
3422 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3423 {
3424 tree dest = CALL_EXPR_ARG (exp, 0);
3425 tree src = CALL_EXPR_ARG (exp, 1);
3426 return expand_builtin_strcpy_args (dest, src, target);
3427 }
3428 return NULL_RTX;
3429 }
3430
3431 /* Helper function to do the actual work for expand_builtin_strcpy. The
3432 arguments to the builtin_strcpy call DEST and SRC are broken out
3433 so that this can also be called without constructing an actual CALL_EXPR.
3434 The other arguments and return value are the same as for
3435 expand_builtin_strcpy. */
3436
3437 static rtx
3438 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3439 {
3440 return expand_movstr (dest, src, target, /*endp=*/0);
3441 }
3442
3443 /* Expand a call EXP to the stpcpy builtin.
3444 Return NULL_RTX if we failed; the caller should emit a normal call.
3445 Otherwise try to get the result in TARGET, if convenient (and in
3446 mode MODE if that's convenient). */
3447
3448 static rtx
3449 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3450 {
3451 tree dst, src;
3452 location_t loc = EXPR_LOCATION (exp);
3453
3454 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3455 return NULL_RTX;
3456
3457 dst = CALL_EXPR_ARG (exp, 0);
3458 src = CALL_EXPR_ARG (exp, 1);
3459
3460 /* If return value is ignored, transform stpcpy into strcpy. */
3461 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3462 {
3463 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3464 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3465 return expand_expr (result, target, mode, EXPAND_NORMAL);
3466 }
3467 else
3468 {
3469 tree len, lenp1;
3470 rtx ret;
3471
3472 /* Ensure we get an actual string whose length can be evaluated at
3473 compile-time, not an expression containing a string. This is
3474 because the latter will potentially produce pessimized code
3475 when used to produce the return value. */
3476 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3477 return expand_movstr (dst, src, target, /*endp=*/2);
3478
3479 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3480 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3481 target, mode, /*endp=*/2);
3482
3483 if (ret)
3484 return ret;
3485
3486 if (TREE_CODE (len) == INTEGER_CST)
3487 {
3488 rtx len_rtx = expand_normal (len);
3489
3490 if (CONST_INT_P (len_rtx))
3491 {
3492 ret = expand_builtin_strcpy_args (dst, src, target);
3493
3494 if (ret)
3495 {
3496 if (! target)
3497 {
3498 if (mode != VOIDmode)
3499 target = gen_reg_rtx (mode);
3500 else
3501 target = gen_reg_rtx (GET_MODE (ret));
3502 }
3503 if (GET_MODE (target) != GET_MODE (ret))
3504 ret = gen_lowpart (GET_MODE (target), ret);
3505
3506 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3507 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3508 gcc_assert (ret);
3509
3510 return target;
3511 }
3512 }
3513 }
3514
3515 return expand_movstr (dst, src, target, /*endp=*/2);
3516 }
3517 }
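
/* A sketch of the transformation above, with hypothetical operands:
when SRC is the literal "abc", c_strlen yields 3, so
stpcpy (dst, "abc") is expanded as mempcpy (dst, "abc", 4) with
ENDP == 2 and returns dst + 3, a pointer to the copied NUL. */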
3518
3519 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3520 bytes from constant string DATA + OFFSET and return it as target
3521 constant. */
3522
3523 rtx
3524 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3525 enum machine_mode mode)
3526 {
3527 const char *str = (const char *) data;
3528
3529 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3530 return const0_rtx;
3531
3532 return c_readstr (str + offset, mode);
3533 }
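
/* For instance, with DATA pointing to "ab" and a 4-byte MODE, offset 0
reads the constant { 'a', 'b', 0, 0 }, while any offset past the
terminating NUL returns const0_rtx; this is what supplies strncpy's
zero padding below. */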
3534
3535 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3536 NULL_RTX if we failed; the caller should emit a normal call. */
3537
3538 static rtx
3539 expand_builtin_strncpy (tree exp, rtx target)
3540 {
3541 location_t loc = EXPR_LOCATION (exp);
3542
3543 if (validate_arglist (exp,
3544 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3545 {
3546 tree dest = CALL_EXPR_ARG (exp, 0);
3547 tree src = CALL_EXPR_ARG (exp, 1);
3548 tree len = CALL_EXPR_ARG (exp, 2);
3549 tree slen = c_strlen (src, 1);
3550
3551 /* We must be passed a constant LEN and a SRC with a known constant length. */
3552 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3553 return NULL_RTX;
3554
3555 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3556
3557 /* We're required to pad with trailing zeros if the requested
3558 len is greater than strlen(s2)+1. In that case try to
3559 use store_by_pieces; if that fails, punt. */
3560 if (tree_int_cst_lt (slen, len))
3561 {
3562 unsigned int dest_align = get_pointer_alignment (dest);
3563 const char *p = c_getstr (src);
3564 rtx dest_mem;
3565
3566 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3567 || !can_store_by_pieces (tree_to_uhwi (len),
3568 builtin_strncpy_read_str,
3569 CONST_CAST (char *, p),
3570 dest_align, false))
3571 return NULL_RTX;
3572
3573 dest_mem = get_memory_rtx (dest, len);
3574 store_by_pieces (dest_mem, tree_to_uhwi (len),
3575 builtin_strncpy_read_str,
3576 CONST_CAST (char *, p), dest_align, false, 0);
3577 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3578 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3579 return dest_mem;
3580 }
3581 }
3582 return NULL_RTX;
3583 }
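
/* A sketch of the padding case handled above (buf is hypothetical):
given char buf[5] and strncpy (buf, "ab", 5), strlen (src) + 1 == 3 is
less than 5, so store_by_pieces writes 'a', 'b' and three NUL bytes,
with builtin_strncpy_read_str supplying the zeros for offsets past the
end of the source string. */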
3584
3585 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3586 bytes from constant string DATA + OFFSET and return it as target
3587 constant. */
3588
3589 rtx
3590 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3591 enum machine_mode mode)
3592 {
3593 const char *c = (const char *) data;
3594 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3595
3596 memset (p, *c, GET_MODE_SIZE (mode));
3597
3598 return c_readstr (p, mode);
3599 }
3600
3601 /* Callback routine for store_by_pieces. Return the RTL of a register
3602 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3603 char value given in the RTL register data. For example, if mode is
3604 4 bytes wide, return the RTL for 0x01010101*data. */
3605
3606 static rtx
3607 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3608 enum machine_mode mode)
3609 {
3610 rtx target, coeff;
3611 size_t size;
3612 char *p;
3613
3614 size = GET_MODE_SIZE (mode);
3615 if (size == 1)
3616 return (rtx) data;
3617
3618 p = XALLOCAVEC (char, size);
3619 memset (p, 1, size);
3620 coeff = c_readstr (p, mode);
3621
3622 target = convert_to_mode (mode, (rtx) data, 1);
3623 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3624 return force_reg (mode, target);
3625 }
3626
3627 /* Expand expression EXP, which is a call to the memset builtin. Return
3628 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3629 try to get the result in TARGET, if convenient (and in mode MODE if that's
3630 convenient). */
3631
3632 static rtx
3633 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3634 {
3635 if (!validate_arglist (exp,
3636 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3637 return NULL_RTX;
3638 else
3639 {
3640 tree dest = CALL_EXPR_ARG (exp, 0);
3641 tree val = CALL_EXPR_ARG (exp, 1);
3642 tree len = CALL_EXPR_ARG (exp, 2);
3643 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3644 }
3645 }
3646
3647 /* Helper function to do the actual work for expand_builtin_memset. The
3648 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3649 so that this can also be called without constructing an actual CALL_EXPR.
3650 The other arguments and return value are the same as for
3651 expand_builtin_memset. */
3652
3653 static rtx
3654 expand_builtin_memset_args (tree dest, tree val, tree len,
3655 rtx target, enum machine_mode mode, tree orig_exp)
3656 {
3657 tree fndecl, fn;
3658 enum built_in_function fcode;
3659 enum machine_mode val_mode;
3660 char c;
3661 unsigned int dest_align;
3662 rtx dest_mem, dest_addr, len_rtx;
3663 HOST_WIDE_INT expected_size = -1;
3664 unsigned int expected_align = 0;
3665 unsigned HOST_WIDE_INT min_size;
3666 unsigned HOST_WIDE_INT max_size;
3667 unsigned HOST_WIDE_INT probable_max_size;
3668
3669 dest_align = get_pointer_alignment (dest);
3670
3671 /* If DEST is not a pointer type, don't do this operation in-line. */
3672 if (dest_align == 0)
3673 return NULL_RTX;
3674
3675 if (currently_expanding_gimple_stmt)
3676 stringop_block_profile (currently_expanding_gimple_stmt,
3677 &expected_align, &expected_size);
3678
3679 if (expected_align < dest_align)
3680 expected_align = dest_align;
3681
3682 /* If the LEN parameter is zero, return DEST. */
3683 if (integer_zerop (len))
3684 {
3685 /* Evaluate and ignore VAL in case it has side-effects. */
3686 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3687 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3688 }
3689
3690 /* Stabilize the arguments in case we fail. */
3691 dest = builtin_save_expr (dest);
3692 val = builtin_save_expr (val);
3693 len = builtin_save_expr (len);
3694
3695 len_rtx = expand_normal (len);
3696 determine_block_size (len, len_rtx, &min_size, &max_size,
3697 &probable_max_size);
3698 dest_mem = get_memory_rtx (dest, len);
3699 val_mode = TYPE_MODE (unsigned_char_type_node);
3700
3701 if (TREE_CODE (val) != INTEGER_CST)
3702 {
3703 rtx val_rtx;
3704
3705 val_rtx = expand_normal (val);
3706 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3707
3708 /* Assume that we can memset by pieces if we can store
3709 the coefficients by pieces (in the required modes).
3710 We can't pass builtin_memset_gen_str as that emits RTL. */
3711 c = 1;
3712 if (tree_fits_uhwi_p (len)
3713 && can_store_by_pieces (tree_to_uhwi (len),
3714 builtin_memset_read_str, &c, dest_align,
3715 true))
3716 {
3717 val_rtx = force_reg (val_mode, val_rtx);
3718 store_by_pieces (dest_mem, tree_to_uhwi (len),
3719 builtin_memset_gen_str, val_rtx, dest_align,
3720 true, 0);
3721 }
3722 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3723 dest_align, expected_align,
3724 expected_size, min_size, max_size,
3725 probable_max_size))
3726 goto do_libcall;
3727
3728 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3729 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3730 return dest_mem;
3731 }
3732
3733 if (target_char_cast (val, &c))
3734 goto do_libcall;
3735
3736 if (c)
3737 {
3738 if (tree_fits_uhwi_p (len)
3739 && can_store_by_pieces (tree_to_uhwi (len),
3740 builtin_memset_read_str, &c, dest_align,
3741 true))
3742 store_by_pieces (dest_mem, tree_to_uhwi (len),
3743 builtin_memset_read_str, &c, dest_align, true, 0);
3744 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3745 gen_int_mode (c, val_mode),
3746 dest_align, expected_align,
3747 expected_size, min_size, max_size,
3748 probable_max_size))
3749 goto do_libcall;
3750
3751 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3752 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3753 return dest_mem;
3754 }
3755
3756 set_mem_align (dest_mem, dest_align);
3757 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3758 CALL_EXPR_TAILCALL (orig_exp)
3759 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3760 expected_align, expected_size,
3761 min_size, max_size,
3762 probable_max_size);
3763
3764 if (dest_addr == 0)
3765 {
3766 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3767 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3768 }
3769
3770 return dest_addr;
3771
3772 do_libcall:
3773 fndecl = get_callee_fndecl (orig_exp);
3774 fcode = DECL_FUNCTION_CODE (fndecl);
3775 if (fcode == BUILT_IN_MEMSET)
3776 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3777 dest, val, len);
3778 else if (fcode == BUILT_IN_BZERO)
3779 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3780 dest, len);
3781 else
3782 gcc_unreachable ();
3783 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3784 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3785 return expand_call (fn, target, target == const0_rtx);
3786 }
3787
3788 /* Expand expression EXP, which is a call to the bzero builtin. Return
3789 NULL_RTX if we failed; the caller should emit a normal call. */
3790
3791 static rtx
3792 expand_builtin_bzero (tree exp)
3793 {
3794 tree dest, size;
3795 location_t loc = EXPR_LOCATION (exp);
3796
3797 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3798 return NULL_RTX;
3799
3800 dest = CALL_EXPR_ARG (exp, 0);
3801 size = CALL_EXPR_ARG (exp, 1);
3802
3803 /* New argument list transforming bzero(ptr x, int y) to
3804 memset(ptr x, int 0, size_t y). This is done this way
3805 so that if it isn't expanded inline, we fall back to
3806 calling bzero instead of memset. */
3807
3808 return expand_builtin_memset_args (dest, integer_zero_node,
3809 fold_convert_loc (loc,
3810 size_type_node, size),
3811 const0_rtx, VOIDmode, exp);
3812 }
3813
3814 /* Expand expression EXP, which is a call to the memcmp built-in function.
3815 Return NULL_RTX if we failed and the caller should emit a normal call,
3816 otherwise try to get the result in TARGET, if convenient (and in mode
3817 MODE, if that's convenient). */
3818
3819 static rtx
3820 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3821 ATTRIBUTE_UNUSED enum machine_mode mode)
3822 {
3823 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3824
3825 if (!validate_arglist (exp,
3826 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3827 return NULL_RTX;
3828
3829 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3830 implementing memcmp because it will stop if it encounters two
3831 zero bytes. */
3832 #if defined HAVE_cmpmemsi
3833 {
3834 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3835 rtx result;
3836 rtx insn;
3837 tree arg1 = CALL_EXPR_ARG (exp, 0);
3838 tree arg2 = CALL_EXPR_ARG (exp, 1);
3839 tree len = CALL_EXPR_ARG (exp, 2);
3840
3841 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3842 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3843 enum machine_mode insn_mode;
3844
3845 if (HAVE_cmpmemsi)
3846 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3847 else
3848 return NULL_RTX;
3849
3850 /* If either argument is not a pointer, call the function. */
3851 if (arg1_align == 0 || arg2_align == 0)
3852 return NULL_RTX;
3853
3854 /* Make a place to write the result of the instruction. */
3855 result = target;
3856 if (! (result != 0
3857 && REG_P (result) && GET_MODE (result) == insn_mode
3858 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3859 result = gen_reg_rtx (insn_mode);
3860
3861 arg1_rtx = get_memory_rtx (arg1, len);
3862 arg2_rtx = get_memory_rtx (arg2, len);
3863 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3864
3865 /* Set MEM_SIZE as appropriate. */
3866 if (CONST_INT_P (arg3_rtx))
3867 {
3868 set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
3869 set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
3870 }
3871
3872 if (HAVE_cmpmemsi)
3873 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3874 GEN_INT (MIN (arg1_align, arg2_align)));
3875 else
3876 gcc_unreachable ();
3877
3878 if (insn)
3879 emit_insn (insn);
3880 else
3881 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
3882 TYPE_MODE (integer_type_node), 3,
3883 XEXP (arg1_rtx, 0), Pmode,
3884 XEXP (arg2_rtx, 0), Pmode,
3885 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
3886 TYPE_UNSIGNED (sizetype)),
3887 TYPE_MODE (sizetype));
3888
3889 /* Return the value in the proper mode for this function. */
3890 mode = TYPE_MODE (TREE_TYPE (exp));
3891 if (GET_MODE (result) == mode)
3892 return result;
3893 else if (target != 0)
3894 {
3895 convert_move (target, result, 0);
3896 return target;
3897 }
3898 else
3899 return convert_to_mode (mode, result, 0);
3900 }
3901 #endif /* HAVE_cmpmemsi. */
3902
3903 return NULL_RTX;
3904 }
3905
3906 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
3907 if we failed; the caller should emit a normal call. Otherwise try to get
3908 the result in TARGET, if convenient. */
3909
3910 static rtx
3911 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
3912 {
3913 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3914 return NULL_RTX;
3915
3916 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
3917 if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
3918 || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
3919 {
3920 rtx arg1_rtx, arg2_rtx;
3921 rtx result, insn = NULL_RTX;
3922 tree fndecl, fn;
3923 tree arg1 = CALL_EXPR_ARG (exp, 0);
3924 tree arg2 = CALL_EXPR_ARG (exp, 1);
3925
3926 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3927 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3928
3929 /* If either argument is not a pointer, call the function. */
3930 if (arg1_align == 0 || arg2_align == 0)
3931 return NULL_RTX;
3932
3933 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
3934 arg1 = builtin_save_expr (arg1);
3935 arg2 = builtin_save_expr (arg2);
3936
3937 arg1_rtx = get_memory_rtx (arg1, NULL);
3938 arg2_rtx = get_memory_rtx (arg2, NULL);
3939
3940 #ifdef HAVE_cmpstrsi
3941 /* Try to call cmpstrsi. */
3942 if (HAVE_cmpstrsi)
3943 {
3944 enum machine_mode insn_mode
3945 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
3946
3947 /* Make a place to write the result of the instruction. */
3948 result = target;
3949 if (! (result != 0
3950 && REG_P (result) && GET_MODE (result) == insn_mode
3951 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3952 result = gen_reg_rtx (insn_mode);
3953
3954 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
3955 GEN_INT (MIN (arg1_align, arg2_align)));
3956 }
3957 #endif
3958 #ifdef HAVE_cmpstrnsi
3959 /* Try to determine at least one length and call cmpstrnsi. */
3960 if (!insn && HAVE_cmpstrnsi)
3961 {
3962 tree len;
3963 rtx arg3_rtx;
3964
3965 enum machine_mode insn_mode
3966 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
3967 tree len1 = c_strlen (arg1, 1);
3968 tree len2 = c_strlen (arg2, 1);
3969
3970 if (len1)
3971 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3972 if (len2)
3973 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3974
3975 /* If we don't have a constant length for the first, use the length
3976 of the second, if we know it. We don't require a constant for
3977 this case; some cost analysis could be done if both are available
3978 but neither is constant. For now, assume they're equally cheap,
3979 unless one has side effects. If both strings have constant lengths,
3980 use the smaller. */
3981
3982 if (!len1)
3983 len = len2;
3984 else if (!len2)
3985 len = len1;
3986 else if (TREE_SIDE_EFFECTS (len1))
3987 len = len2;
3988 else if (TREE_SIDE_EFFECTS (len2))
3989 len = len1;
3990 else if (TREE_CODE (len1) != INTEGER_CST)
3991 len = len2;
3992 else if (TREE_CODE (len2) != INTEGER_CST)
3993 len = len1;
3994 else if (tree_int_cst_lt (len1, len2))
3995 len = len1;
3996 else
3997 len = len2;
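
/* E.g. if arg1 is the literal "hello", len1 is strlen + 1 == 6, so at
most 6 bytes need comparing: two strings that match through arg1's NUL
terminator compare equal. */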
3998
3999 /* If both arguments have side effects, we cannot optimize. */
4000 if (!len || TREE_SIDE_EFFECTS (len))
4001 goto do_libcall;
4002
4003 arg3_rtx = expand_normal (len);
4004
4005 /* Make a place to write the result of the instruction. */
4006 result = target;
4007 if (! (result != 0
4008 && REG_P (result) && GET_MODE (result) == insn_mode
4009 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4010 result = gen_reg_rtx (insn_mode);
4011
4012 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4013 GEN_INT (MIN (arg1_align, arg2_align)));
4014 }
4015 #endif
4016
4017 if (insn)
4018 {
4019 enum machine_mode mode;
4020 emit_insn (insn);
4021
4022 /* Return the value in the proper mode for this function. */
4023 mode = TYPE_MODE (TREE_TYPE (exp));
4024 if (GET_MODE (result) == mode)
4025 return result;
4026 if (target == 0)
4027 return convert_to_mode (mode, result, 0);
4028 convert_move (target, result, 0);
4029 return target;
4030 }
4031
4032 /* Expand the library call ourselves using a stabilized argument
4033 list to avoid re-evaluating the function's arguments twice. */
4034 #ifdef HAVE_cmpstrnsi
4035 do_libcall:
4036 #endif
4037 fndecl = get_callee_fndecl (exp);
4038 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4039 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4040 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4041 return expand_call (fn, target, target == const0_rtx);
4042 }
4043 #endif
4044 return NULL_RTX;
4045 }
4046
4047 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4048 NULL_RTX if we failed; the caller should emit a normal call. Otherwise try to get
4049 the result in TARGET, if convenient. */
4050
4051 static rtx
4052 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4053 ATTRIBUTE_UNUSED enum machine_mode mode)
4054 {
4055 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4056
4057 if (!validate_arglist (exp,
4058 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4059 return NULL_RTX;
4060
4061 /* If c_strlen can determine an expression for one of the string
4062 lengths, and it doesn't have side effects, then emit cmpstrnsi
4063 using length MIN(strlen(string)+1, arg3). */
4064 #ifdef HAVE_cmpstrnsi
4065 if (HAVE_cmpstrnsi)
4066 {
4067 tree len, len1, len2;
4068 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4069 rtx result, insn;
4070 tree fndecl, fn;
4071 tree arg1 = CALL_EXPR_ARG (exp, 0);
4072 tree arg2 = CALL_EXPR_ARG (exp, 1);
4073 tree arg3 = CALL_EXPR_ARG (exp, 2);
4074
4075 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4076 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4077 enum machine_mode insn_mode
4078 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4079
4080 len1 = c_strlen (arg1, 1);
4081 len2 = c_strlen (arg2, 1);
4082
4083 if (len1)
4084 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4085 if (len2)
4086 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4087
4088 /* If we don't have a constant length for the first, use the length
4089 of the second, if we know it. We don't require a constant for
4090 this case; some cost analysis could be done if both are available
4091 but neither is constant. For now, assume they're equally cheap,
4092 unless one has side effects. If both strings have constant lengths,
4093 use the smaller. */
4094
4095 if (!len1)
4096 len = len2;
4097 else if (!len2)
4098 len = len1;
4099 else if (TREE_SIDE_EFFECTS (len1))
4100 len = len2;
4101 else if (TREE_SIDE_EFFECTS (len2))
4102 len = len1;
4103 else if (TREE_CODE (len1) != INTEGER_CST)
4104 len = len2;
4105 else if (TREE_CODE (len2) != INTEGER_CST)
4106 len = len1;
4107 else if (tree_int_cst_lt (len1, len2))
4108 len = len1;
4109 else
4110 len = len2;
4111
4112 /* If both arguments have side effects, we cannot optimize. */
4113 if (!len || TREE_SIDE_EFFECTS (len))
4114 return NULL_RTX;
4115
4116 /* The actual new length parameter is MIN(len,arg3). */
4117 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4118 fold_convert_loc (loc, TREE_TYPE (len), arg3));
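
/* For instance, strncmp ("ab", s, 100) gives len1 == 3, so LEN becomes
MIN (3, 100) == 3; the comparison cannot meaningfully extend past the
constant string's NUL terminator. */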
4119
4120 /* If either argument is not a pointer, call the function. */
4121 if (arg1_align == 0 || arg2_align == 0)
4122 return NULL_RTX;
4123
4124 /* Make a place to write the result of the instruction. */
4125 result = target;
4126 if (! (result != 0
4127 && REG_P (result) && GET_MODE (result) == insn_mode
4128 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4129 result = gen_reg_rtx (insn_mode);
4130
4131 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4132 arg1 = builtin_save_expr (arg1);
4133 arg2 = builtin_save_expr (arg2);
4134 len = builtin_save_expr (len);
4135
4136 arg1_rtx = get_memory_rtx (arg1, len);
4137 arg2_rtx = get_memory_rtx (arg2, len);
4138 arg3_rtx = expand_normal (len);
4139 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4140 GEN_INT (MIN (arg1_align, arg2_align)));
4141 if (insn)
4142 {
4143 emit_insn (insn);
4144
4145 /* Return the value in the proper mode for this function. */
4146 mode = TYPE_MODE (TREE_TYPE (exp));
4147 if (GET_MODE (result) == mode)
4148 return result;
4149 if (target == 0)
4150 return convert_to_mode (mode, result, 0);
4151 convert_move (target, result, 0);
4152 return target;
4153 }
4154
4155 /* Expand the library call ourselves using a stabilized argument
4156 list to avoid re-evaluating the function's arguments twice. */
4157 fndecl = get_callee_fndecl (exp);
4158 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4159 arg1, arg2, len);
4160 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4161 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4162 return expand_call (fn, target, target == const0_rtx);
4163 }
4164 #endif
4165 return NULL_RTX;
4166 }
4167
4168 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4169 if that's convenient. */
4170
4171 rtx
4172 expand_builtin_saveregs (void)
4173 {
4174 rtx val, seq;
4175
4176 /* Don't do __builtin_saveregs more than once in a function.
4177 Save the result of the first call and reuse it. */
4178 if (saveregs_value != 0)
4179 return saveregs_value;
4180
4181 /* When this function is called, it means that registers must be
4182 saved on entry to this function. So we migrate the call to the
4183 first insn of this function. */
4184
4185 start_sequence ();
4186
4187 /* Do whatever the machine needs done in this case. */
4188 val = targetm.calls.expand_builtin_saveregs ();
4189
4190 seq = get_insns ();
4191 end_sequence ();
4192
4193 saveregs_value = val;
4194
4195 /* Put the insns after the NOTE that starts the function. If this
4196 is inside a start_sequence, make the outer-level insn chain current, so
4197 the code is placed at the start of the function. */
4198 push_topmost_sequence ();
4199 emit_insn_after (seq, entry_of_function ());
4200 pop_topmost_sequence ();
4201
4202 return val;
4203 }
4204
4205 /* Expand a call to __builtin_next_arg. */
4206
4207 static rtx
4208 expand_builtin_next_arg (void)
4209 {
4210 /* Checking arguments is already done in fold_builtin_next_arg
4211 that must be called before this function. */
4212 return expand_binop (ptr_mode, add_optab,
4213 crtl->args.internal_arg_pointer,
4214 crtl->args.arg_offset_rtx,
4215 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4216 }
4217
4218 /* Make it easier for the backends by protecting the valist argument
4219 from multiple evaluations. */
4220
4221 static tree
4222 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4223 {
4224 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4225
4226 /* The current way of determining the type of valist is completely
4227 bogus. We should have the information on the va builtin instead. */
4228 if (!vatype)
4229 vatype = targetm.fn_abi_va_list (cfun->decl);
4230
4231 if (TREE_CODE (vatype) == ARRAY_TYPE)
4232 {
4233 if (TREE_SIDE_EFFECTS (valist))
4234 valist = save_expr (valist);
4235
4236 /* For this case, the backends will be expecting a pointer to
4237 vatype, but it's possible we've actually been given an array
4238 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4239 So fix it. */
4240 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4241 {
4242 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4243 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4244 }
4245 }
4246 else
4247 {
4248 tree pt = build_pointer_type (vatype);
4249
4250 if (! needs_lvalue)
4251 {
4252 if (! TREE_SIDE_EFFECTS (valist))
4253 return valist;
4254
4255 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4256 TREE_SIDE_EFFECTS (valist) = 1;
4257 }
4258
4259 if (TREE_SIDE_EFFECTS (valist))
4260 valist = save_expr (valist);
4261 valist = fold_build2_loc (loc, MEM_REF,
4262 vatype, valist, build_int_cst (pt, 0));
4263 }
4264
4265 return valist;
4266 }
4267
4268 /* The "standard" definition of va_list is void*. */
4269
4270 tree
4271 std_build_builtin_va_list (void)
4272 {
4273 return ptr_type_node;
4274 }
4275
4276 /* The "standard" abi va_list is va_list_type_node. */
4277
4278 tree
4279 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4280 {
4281 return va_list_type_node;
4282 }
4283
4284 /* The "standard" type of va_list is va_list_type_node. */
4285
4286 tree
4287 std_canonical_va_list_type (tree type)
4288 {
4289 tree wtype, htype;
4290
4291 if (INDIRECT_REF_P (type))
4292 type = TREE_TYPE (type);
4293 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
4294 type = TREE_TYPE (type);
4295 wtype = va_list_type_node;
4296 htype = type;
4297 /* Treat structure va_list types. */
4298 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4299 htype = TREE_TYPE (htype);
4300 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4301 {
4302 /* If va_list is an array type, the argument may have decayed
4303 to a pointer type, e.g. by being passed to another function.
4304 In that case, unwrap both types so that we can compare the
4305 underlying records. */
4306 if (TREE_CODE (htype) == ARRAY_TYPE
4307 || POINTER_TYPE_P (htype))
4308 {
4309 wtype = TREE_TYPE (wtype);
4310 htype = TREE_TYPE (htype);
4311 }
4312 }
4313 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4314 return va_list_type_node;
4315
4316 return NULL_TREE;
4317 }
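
/* For example, on targets whose va_list is an array type such as
struct __va_list_tag[1], a va_list object passed to another function
decays to struct __va_list_tag *; the unwrapping above lets both forms
canonicalize to the same underlying record type. */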
4318
4319 /* The "standard" implementation of va_start: just assign `nextarg' to
4320 the variable. */
4321
4322 void
4323 std_expand_builtin_va_start (tree valist, rtx nextarg)
4324 {
4325 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4326 convert_move (va_r, nextarg, 0);
4327 }
4328
4329 /* Expand EXP, a call to __builtin_va_start. */
4330
4331 static rtx
4332 expand_builtin_va_start (tree exp)
4333 {
4334 rtx nextarg;
4335 tree valist;
4336 location_t loc = EXPR_LOCATION (exp);
4337
4338 if (call_expr_nargs (exp) < 2)
4339 {
4340 error_at (loc, "too few arguments to function %<va_start%>");
4341 return const0_rtx;
4342 }
4343
4344 if (fold_builtin_next_arg (exp, true))
4345 return const0_rtx;
4346
4347 nextarg = expand_builtin_next_arg ();
4348 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4349
4350 if (targetm.expand_builtin_va_start)
4351 targetm.expand_builtin_va_start (valist, nextarg);
4352 else
4353 std_expand_builtin_va_start (valist, nextarg);
4354
4355 return const0_rtx;
4356 }
4357
4358 /* Expand EXP, a call to __builtin_va_end. */
4359
4360 static rtx
4361 expand_builtin_va_end (tree exp)
4362 {
4363 tree valist = CALL_EXPR_ARG (exp, 0);
4364
4365 /* Evaluate for side effects, if needed. I hate macros that don't
4366 do that. */
4367 if (TREE_SIDE_EFFECTS (valist))
4368 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4369
4370 return const0_rtx;
4371 }
4372
4373 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4374 builtin rather than just as an assignment in stdarg.h because of the
4375 nastiness of array-type va_list types. */
4376
4377 static rtx
4378 expand_builtin_va_copy (tree exp)
4379 {
4380 tree dst, src, t;
4381 location_t loc = EXPR_LOCATION (exp);
4382
4383 dst = CALL_EXPR_ARG (exp, 0);
4384 src = CALL_EXPR_ARG (exp, 1);
4385
4386 dst = stabilize_va_list_loc (loc, dst, 1);
4387 src = stabilize_va_list_loc (loc, src, 0);
4388
4389 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4390
4391 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4392 {
4393 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4394 TREE_SIDE_EFFECTS (t) = 1;
4395 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4396 }
4397 else
4398 {
4399 rtx dstb, srcb, size;
4400
4401 /* Evaluate to pointers. */
4402 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4403 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4404 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4405 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4406
4407 dstb = convert_memory_address (Pmode, dstb);
4408 srcb = convert_memory_address (Pmode, srcb);
4409
4410 /* "Dereference" to BLKmode memories. */
4411 dstb = gen_rtx_MEM (BLKmode, dstb);
4412 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4413 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4414 srcb = gen_rtx_MEM (BLKmode, srcb);
4415 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4416 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4417
4418 /* Copy. */
4419 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4420 }
4421
4422 return const0_rtx;
4423 }
4424
4425 /* Expand a call to one of the builtin functions __builtin_frame_address or
4426 __builtin_return_address. */
4427
4428 static rtx
4429 expand_builtin_frame_address (tree fndecl, tree exp)
4430 {
4431 /* The argument must be a nonnegative integer constant.
4432 It counts the number of frames to scan up the stack.
4433 The value is the return address (or frame address) saved in that frame. */
4434 if (call_expr_nargs (exp) == 0)
4435 /* Warning about missing arg was already issued. */
4436 return const0_rtx;
4437 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4438 {
4439 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4440 error ("invalid argument to %<__builtin_frame_address%>");
4441 else
4442 error ("invalid argument to %<__builtin_return_address%>");
4443 return const0_rtx;
4444 }
4445 else
4446 {
4447 rtx tem
4448 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4449 tree_to_uhwi (CALL_EXPR_ARG (exp, 0)));
4450
4451 /* Some ports cannot access arbitrary stack frames. */
4452 if (tem == NULL)
4453 {
4454 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4455 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4456 else
4457 warning (0, "unsupported argument to %<__builtin_return_address%>");
4458 return const0_rtx;
4459 }
4460
4461 /* For __builtin_frame_address, return what we've got. */
4462 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4463 return tem;
4464
4465 if (!REG_P (tem)
4466 && ! CONSTANT_P (tem))
4467 tem = copy_addr_to_reg (tem);
4468 return tem;
4469 }
4470 }
4471
4472 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4473 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4474 is the same as for allocate_dynamic_stack_space. */
4475
4476 static rtx
4477 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4478 {
4479 rtx op0;
4480 rtx result;
4481 bool valid_arglist;
4482 unsigned int align;
4483 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4484 == BUILT_IN_ALLOCA_WITH_ALIGN);
4485
4486 valid_arglist
4487 = (alloca_with_align
4488 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4489 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4490
4491 if (!valid_arglist)
4492 return NULL_RTX;
4493
4494 /* Compute the argument. */
4495 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4496
4497 /* Compute the alignment. */
4498 align = (alloca_with_align
4499 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4500 : BIGGEST_ALIGNMENT);
4501
4502 /* Allocate the desired space. */
4503 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4504 result = convert_memory_address (ptr_mode, result);
4505
4506 return result;
4507 }
4508
4509 /* Expand a call to bswap builtin in EXP.
4510 Return NULL_RTX if a normal call should be emitted rather than expanding the
4511 function in-line. If convenient, the result should be placed in TARGET.
4512 SUBTARGET may be used as the target for computing one of EXP's operands. */
4513
4514 static rtx
4515 expand_builtin_bswap (enum machine_mode target_mode, tree exp, rtx target,
4516 rtx subtarget)
4517 {
4518 tree arg;
4519 rtx op0;
4520
4521 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4522 return NULL_RTX;
4523
4524 arg = CALL_EXPR_ARG (exp, 0);
4525 op0 = expand_expr (arg,
4526 subtarget && GET_MODE (subtarget) == target_mode
4527 ? subtarget : NULL_RTX,
4528 target_mode, EXPAND_NORMAL);
4529 if (GET_MODE (op0) != target_mode)
4530 op0 = convert_to_mode (target_mode, op0, 1);
4531
4532 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4533
4534 gcc_assert (target);
4535
4536 return convert_to_mode (target_mode, target, 1);
4537 }
4538
4539 /* Expand a call to a unary builtin in EXP.
4540 Return NULL_RTX if a normal call should be emitted rather than expanding the
4541 function in-line. If convenient, the result should be placed in TARGET.
4542 SUBTARGET may be used as the target for computing one of EXP's operands. */
4543
4544 static rtx
4545 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
4546 rtx subtarget, optab op_optab)
4547 {
4548 rtx op0;
4549
4550 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4551 return NULL_RTX;
4552
4553 /* Compute the argument. */
4554 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4555 (subtarget
4556 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4557 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4558 VOIDmode, EXPAND_NORMAL);
4559 /* Compute op, into TARGET if possible.
4560 Set TARGET to wherever the result comes back. */
4561 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4562 op_optab, op0, target, op_optab != clrsb_optab);
4563 gcc_assert (target);
4564
4565 return convert_to_mode (target_mode, target, 0);
4566 }
4567
4568 /* Expand a call to __builtin_expect. We just return our argument
4569 as the builtin_expect semantics should already have been acted on by
4570 the tree branch prediction pass. */
4571
4572 static rtx
4573 expand_builtin_expect (tree exp, rtx target)
4574 {
4575 tree arg;
4576
4577 if (call_expr_nargs (exp) < 2)
4578 return const0_rtx;
4579 arg = CALL_EXPR_ARG (exp, 0);
4580
4581 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4582 /* When guessing was done, the hints should be already stripped away. */
4583 gcc_assert (!flag_guess_branch_prob
4584 || optimize == 0 || seen_error ());
4585 return target;
4586 }
4587
4588 /* Expand a call to __builtin_assume_aligned. We just return our first
4589 argument, as the builtin_assume_aligned semantics should already have
4590 been handled by CCP. */
4591
4592 static rtx
4593 expand_builtin_assume_aligned (tree exp, rtx target)
4594 {
4595 if (call_expr_nargs (exp) < 2)
4596 return const0_rtx;
4597 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4598 EXPAND_NORMAL);
4599 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4600 && (call_expr_nargs (exp) < 3
4601 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4602 return target;
4603 }
4604
4605 void
4606 expand_builtin_trap (void)
4607 {
4608 #ifdef HAVE_trap
4609 if (HAVE_trap)
4610 {
4611 rtx insn = emit_insn (gen_trap ());
4612 /* For trap insns when not accumulating outgoing args force
4613 REG_ARGS_SIZE note to prevent crossjumping of calls with
4614 different args sizes. */
4615 if (!ACCUMULATE_OUTGOING_ARGS)
4616 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4617 }
4618 else
4619 #endif
4620 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4621 emit_barrier ();
4622 }
4623
4624 /* Expand a call to __builtin_unreachable. We do nothing except emit
4625 a barrier saying that control flow will not pass here.
4626
4627 It is the responsibility of the program being compiled to ensure
4628 that control flow never reaches __builtin_unreachable. */
4629 static void
4630 expand_builtin_unreachable (void)
4631 {
4632 emit_barrier ();
4633 }
4634
4635 /* Expand EXP, a call to fabs, fabsf or fabsl.
4636 Return NULL_RTX if a normal call should be emitted rather than expanding
4637 the function inline. If convenient, the result should be placed
4638 in TARGET. SUBTARGET may be used as the target for computing
4639 the operand. */
4640
4641 static rtx
4642 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4643 {
4644 enum machine_mode mode;
4645 tree arg;
4646 rtx op0;
4647
4648 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4649 return NULL_RTX;
4650
4651 arg = CALL_EXPR_ARG (exp, 0);
4652 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4653 mode = TYPE_MODE (TREE_TYPE (arg));
4654 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4655 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4656 }
4657
4658 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4659 Return NULL_RTX if a normal call should be emitted rather than expanding the
4660 function inline. If convenient, the result should be placed in TARGET.
4661 SUBTARGET may be used as the target for computing the operand. */
4662
4663 static rtx
4664 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4665 {
4666 rtx op0, op1;
4667 tree arg;
4668
4669 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4670 return NULL_RTX;
4671
4672 arg = CALL_EXPR_ARG (exp, 0);
4673 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4674
4675 arg = CALL_EXPR_ARG (exp, 1);
4676 op1 = expand_normal (arg);
4677
4678 return expand_copysign (op0, op1, target);
4679 }
4680
4681 /* Create a new constant string literal and return a char* pointer to it.
4682 The STRING_CST value is the LEN characters at STR. */
4683 tree
4684 build_string_literal (int len, const char *str)
4685 {
4686 tree t, elem, index, type;
4687
4688 t = build_string (len, str);
4689 elem = build_type_variant (char_type_node, 1, 0);
4690 index = build_index_type (size_int (len - 1));
4691 type = build_array_type (elem, index);
4692 TREE_TYPE (t) = type;
4693 TREE_CONSTANT (t) = 1;
4694 TREE_READONLY (t) = 1;
4695 TREE_STATIC (t) = 1;
4696
4697 type = build_pointer_type (elem);
4698 t = build1 (ADDR_EXPR, type,
4699 build4 (ARRAY_REF, elem,
4700 t, integer_zero_node, NULL_TREE, NULL_TREE));
4701 return t;
4702 }
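
/* For instance, when a printf call is simplified to puts elsewhere in
the compiler, the new argument can be built as
build_string_literal (strlen ("hello") + 1, "hello"), yielding a
char * that points at a static copy of the string. */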
4703
4704 /* Expand a call to __builtin___clear_cache. */
4705
4706 static rtx
4707 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
4708 {
4709 #ifndef HAVE_clear_cache
4710 #ifdef CLEAR_INSN_CACHE
4711 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4712 does something. Just do the default expansion to a call to
4713 __clear_cache(). */
4714 return NULL_RTX;
4715 #else
4716 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4717 does nothing. There is no need to call it. Do nothing. */
4718 return const0_rtx;
4719 #endif /* CLEAR_INSN_CACHE */
4720 #else
4721 /* We have a "clear_cache" insn, and it will handle everything. */
4722 tree begin, end;
4723 rtx begin_rtx, end_rtx;
4724
4725 /* We must not expand to a library call. If we did, any
4726 fallback library function in libgcc that might contain a call to
4727 __builtin___clear_cache() would recurse infinitely. */
4728 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4729 {
4730 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4731 return const0_rtx;
4732 }
4733
4734 if (HAVE_clear_cache)
4735 {
4736 struct expand_operand ops[2];
4737
4738 begin = CALL_EXPR_ARG (exp, 0);
4739 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4740
4741 end = CALL_EXPR_ARG (exp, 1);
4742 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4743
4744 create_address_operand (&ops[0], begin_rtx);
4745 create_address_operand (&ops[1], end_rtx);
4746 if (maybe_expand_insn (CODE_FOR_clear_cache, 2, ops))
4747 return const0_rtx;
4748 }
4749 return const0_rtx;
4750 #endif /* HAVE_clear_cache */
4751 }
4752
4753 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4754
4755 static rtx
4756 round_trampoline_addr (rtx tramp)
4757 {
4758 rtx temp, addend, mask;
4759
4760 /* If we don't need too much alignment, we'll have been guaranteed
4761 proper alignment by get_trampoline_type. */
4762 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4763 return tramp;
4764
4765 /* Round address up to desired boundary. */
4766 temp = gen_reg_rtx (Pmode);
4767 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4768 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4769
4770 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4771 temp, 0, OPTAB_LIB_WIDEN);
4772 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4773 temp, 0, OPTAB_LIB_WIDEN);
4774
4775 return tramp;
4776 }
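
/* Numerically, with a TRAMPOLINE_ALIGNMENT of 64 bits the code above
computes (tramp + 7) & -8, the usual round-up-to-a-multiple idiom: an
address of 0x1003 becomes 0x1008, while 0x1008 stays unchanged. */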
4777
4778 static rtx
4779 expand_builtin_init_trampoline (tree exp, bool onstack)
4780 {
4781 tree t_tramp, t_func, t_chain;
4782 rtx m_tramp, r_tramp, r_chain, tmp;
4783
4784 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4785 POINTER_TYPE, VOID_TYPE))
4786 return NULL_RTX;
4787
4788 t_tramp = CALL_EXPR_ARG (exp, 0);
4789 t_func = CALL_EXPR_ARG (exp, 1);
4790 t_chain = CALL_EXPR_ARG (exp, 2);
4791
4792 r_tramp = expand_normal (t_tramp);
4793 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4794 MEM_NOTRAP_P (m_tramp) = 1;
4795
4796 /* If ONSTACK, the TRAMP argument should be the address of a field
4797 within the local function's FRAME decl. Either way, let's see if
4798 we can fill in the MEM_ATTRs for this memory. */
4799 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4800 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
4801
4802 /* Creator of a heap trampoline is responsible for making sure the
4803 address is aligned to at least STACK_BOUNDARY. Normally malloc
4804 will ensure this anyhow. */
4805 tmp = round_trampoline_addr (r_tramp);
4806 if (tmp != r_tramp)
4807 {
4808 m_tramp = change_address (m_tramp, BLKmode, tmp);
4809 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4810 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4811 }
4812
4813 /* The FUNC argument should be the address of the nested function.
4814 Extract the actual function decl to pass to the hook. */
4815 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4816 t_func = TREE_OPERAND (t_func, 0);
4817 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4818
4819 r_chain = expand_normal (t_chain);
4820
4821 /* Generate insns to initialize the trampoline. */
4822 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4823
4824 if (onstack)
4825 {
4826 trampolines_created = 1;
4827
4828 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4829 "trampoline generated for nested function %qD", t_func);
4830 }
4831
4832 return const0_rtx;
4833 }
4834
4835 static rtx
4836 expand_builtin_adjust_trampoline (tree exp)
4837 {
4838 rtx tramp;
4839
4840 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4841 return NULL_RTX;
4842
4843 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4844 tramp = round_trampoline_addr (tramp);
4845 if (targetm.calls.trampoline_adjust_address)
4846 tramp = targetm.calls.trampoline_adjust_address (tramp);
4847
4848 return tramp;
4849 }
4850
4851 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4852 function. The function first checks whether the back end provides
4853 an insn to implement signbit for the respective mode. If not, it
4854 checks whether the floating point format of the value is such that
4855 the sign bit can be extracted. If that is not the case, the
4856 function returns NULL_RTX to indicate that a normal call should be
4857 emitted rather than expanding the function in-line. EXP is the
4858 expression that is a call to the builtin function; if convenient,
4859 the result should be placed in TARGET. */
4860 static rtx
4861 expand_builtin_signbit (tree exp, rtx target)
4862 {
4863 const struct real_format *fmt;
4864 enum machine_mode fmode, imode, rmode;
4865 tree arg;
4866 int word, bitpos;
4867 enum insn_code icode;
4868 rtx temp;
4869 location_t loc = EXPR_LOCATION (exp);
4870
4871 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4872 return NULL_RTX;
4873
4874 arg = CALL_EXPR_ARG (exp, 0);
4875 fmode = TYPE_MODE (TREE_TYPE (arg));
4876 rmode = TYPE_MODE (TREE_TYPE (exp));
4877 fmt = REAL_MODE_FORMAT (fmode);
4878
4879 arg = builtin_save_expr (arg);
4880
4881 /* Expand the argument yielding a RTX expression. */
4882 temp = expand_normal (arg);
4883
4884 /* Check if the back end provides an insn that handles signbit for the
4885 argument's mode. */
4886 icode = optab_handler (signbit_optab, fmode);
4887 if (icode != CODE_FOR_nothing)
4888 {
4889 rtx last = get_last_insn ();
4890 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4891 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4892 return target;
4893 delete_insns_since (last);
4894 }
4895
4896 /* For floating point formats without a sign bit, implement signbit
4897 as "ARG < 0.0". */
4898 bitpos = fmt->signbit_ro;
4899 if (bitpos < 0)
4900 {
4901 /* But we can't do this if the format supports signed zero. */
4902 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
4903 return NULL_RTX;
4904
4905 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
4906 build_real (TREE_TYPE (arg), dconst0));
4907 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4908 }
4909
4910 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
4911 {
4912 imode = int_mode_for_mode (fmode);
4913 if (imode == BLKmode)
4914 return NULL_RTX;
4915 temp = gen_lowpart (imode, temp);
4916 }
4917 else
4918 {
4919 imode = word_mode;
4920 /* Handle targets with different FP word orders. */
4921 if (FLOAT_WORDS_BIG_ENDIAN)
4922 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
4923 else
4924 word = bitpos / BITS_PER_WORD;
4925 temp = operand_subword_force (temp, word, fmode);
4926 bitpos = bitpos % BITS_PER_WORD;
4927 }
4928
4929 /* Force the intermediate word_mode (or narrower) result into a
4930 register. This avoids attempting to create paradoxical SUBREGs
4931 of floating point modes below. */
4932 temp = force_reg (imode, temp);
4933
4934 /* If the bitpos is within the "result mode" lowpart, the operation
4935 can be implemented with a single bitwise AND. Otherwise, we need
4936 a right shift and an AND. */
4937
4938 if (bitpos < GET_MODE_BITSIZE (rmode))
4939 {
4940 double_int mask = double_int_zero.set_bit (bitpos);
4941
4942 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
4943 temp = gen_lowpart (rmode, temp);
4944 temp = expand_binop (rmode, and_optab, temp,
4945 immed_double_int_const (mask, rmode),
4946 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4947 }
4948 else
4949 {
4950 /* Perform a logical right shift to place the signbit in the least
4951 significant bit, then truncate the result to the desired mode
4952 and mask just this bit. */
4953 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
4954 temp = gen_lowpart (rmode, temp);
4955 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
4956 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4957 }
4958
4959 return temp;
4960 }
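
/* As a concrete sketch for IEEE single precision on a typical 32-bit
target: bitpos is 31 and rmode is a 32-bit integer mode, so the lowpart
path applies and the result is computed as the bit image of the
argument ANDed with 0x80000000, nonzero iff the sign bit is set. */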
4961
4962 /* Expand fork or exec calls. TARGET is the desired target of the
4963 call. EXP is the call. FN is the
4964 identifier of the actual function. IGNORE is nonzero if the
4965 value is to be ignored. */
4966
4967 static rtx
4968 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
4969 {
4970 tree id, decl;
4971 tree call;
4972
4973 /* If we are not profiling, just call the function. */
4974 if (!profile_arc_flag)
4975 return NULL_RTX;
4976
4977 /* Otherwise call the wrapper. This should be equivalent for the rest of
4978 the compiler, so the code does not diverge, and the wrapper may run the
4979 code necessary for keeping the profiling sane. */
4980
4981 switch (DECL_FUNCTION_CODE (fn))
4982 {
4983 case BUILT_IN_FORK:
4984 id = get_identifier ("__gcov_fork");
4985 break;
4986
4987 case BUILT_IN_EXECL:
4988 id = get_identifier ("__gcov_execl");
4989 break;
4990
4991 case BUILT_IN_EXECV:
4992 id = get_identifier ("__gcov_execv");
4993 break;
4994
4995 case BUILT_IN_EXECLP:
4996 id = get_identifier ("__gcov_execlp");
4997 break;
4998
4999 case BUILT_IN_EXECLE:
5000 id = get_identifier ("__gcov_execle");
5001 break;
5002
5003 case BUILT_IN_EXECVP:
5004 id = get_identifier ("__gcov_execvp");
5005 break;
5006
5007 case BUILT_IN_EXECVE:
5008 id = get_identifier ("__gcov_execve");
5009 break;
5010
5011 default:
5012 gcc_unreachable ();
5013 }
5014
5015 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5016 FUNCTION_DECL, id, TREE_TYPE (fn));
5017 DECL_EXTERNAL (decl) = 1;
5018 TREE_PUBLIC (decl) = 1;
5019 DECL_ARTIFICIAL (decl) = 1;
5020 TREE_NOTHROW (decl) = 1;
5021 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5022 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5023 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5024 return expand_call (call, target, ignore);
5025 }
5026
5027
5028 \f
5029 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5030 the pointer in these functions is void*, the tree optimizers may remove
5031 casts. The mode computed in expand_builtin isn't reliable either, due
5032 to __sync_bool_compare_and_swap.
5033
5034 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5035 group of builtins. This gives us log2 of the mode size. */
5036
5037 static inline enum machine_mode
5038 get_builtin_sync_mode (int fcode_diff)
5039 {
5040 /* The size is not negotiable, so ask not to get BLKmode in return
5041 if the target indicates that a smaller size would be better. */
5042 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5043 }
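
/* E.g. __sync_fetch_and_add_4 has FCODE_DIFF == 2 relative to the _1
variant, so this requests an 8 << 2 == 32-bit integer mode (SImode on
typical targets). */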
5044
5045 /* Expand the memory expression LOC and return the appropriate memory operand
5046 for the builtin_sync operations. */
5047
5048 static rtx
5049 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5050 {
5051 rtx addr, mem;
5052
5053 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5054 addr = convert_memory_address (Pmode, addr);
5055
5056 /* Note that we explicitly do not want any alias information for this
5057 memory, so that we kill all other live memories. Otherwise we don't
5058 satisfy the full barrier semantics of the intrinsic. */
5059 mem = validize_mem (gen_rtx_MEM (mode, addr));
5060
5061 /* The alignment needs to be at least that required by the mode. */
5062 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5063 get_pointer_alignment (loc)));
5064 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5065 MEM_VOLATILE_P (mem) = 1;
5066
5067 return mem;
5068 }
5069
5070 /* Make sure an argument is in the right mode.
5071 EXP is the tree argument.
5072 MODE is the mode it should be in. */
5073
5074 static rtx
5075 expand_expr_force_mode (tree exp, enum machine_mode mode)
5076 {
5077 rtx val;
5078 enum machine_mode old_mode;
5079
5080 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5081 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5082 of CONST_INTs, where we know the old_mode only from the call argument. */
5083
5084 old_mode = GET_MODE (val);
5085 if (old_mode == VOIDmode)
5086 old_mode = TYPE_MODE (TREE_TYPE (exp));
5087 val = convert_modes (mode, old_mode, val, 1);
5088 return val;
5089 }
5090
5091
5092 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5093 EXP is the CALL_EXPR. CODE is the rtx code
5094 that corresponds to the arithmetic or logical operation from the name;
5095 an exception here is that NOT actually means NAND. TARGET is an optional
5096 place for us to store the results; AFTER is true if this is the
5097 fetch_and_xxx form. */
5098
5099 static rtx
5100 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5101 enum rtx_code code, bool after,
5102 rtx target)
5103 {
5104 rtx val, mem;
5105 location_t loc = EXPR_LOCATION (exp);
5106
5107 if (code == NOT && warn_sync_nand)
5108 {
5109 tree fndecl = get_callee_fndecl (exp);
5110 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5111
5112 static bool warned_f_a_n, warned_n_a_f;
5113
5114 switch (fcode)
5115 {
5116 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5117 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5118 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5119 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5120 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5121 if (warned_f_a_n)
5122 break;
5123
5124 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5125 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5126 warned_f_a_n = true;
5127 break;
5128
5129 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5130 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5131 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5132 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5133 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5134 if (warned_n_a_f)
5135 break;
5136
5137 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5138 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5139 warned_n_a_f = true;
5140 break;
5141
5142 default:
5143 gcc_unreachable ();
5144 }
5145 }
5146
5147 /* Expand the operands. */
5148 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5149 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5150
5151 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SEQ_CST,
5152 after);
5153 }
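/* Usage sketch (illustrative): both source-level forms funnel through the
   expander above; AFTER only selects which value is returned, and the
   memory order is always MEMMODEL_SEQ_CST for the __sync family:

       int counter;
       int before = __sync_fetch_and_add (&counter, 1);  // value before add
       int after_v = __sync_add_and_fetch (&counter, 1); // value after add
*/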
5154
5155 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5156 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5157 true if this is the boolean form. TARGET is a place for us to store the
5158 results; this is NOT optional if IS_BOOL is true. */
5159
5160 static rtx
5161 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5162 bool is_bool, rtx target)
5163 {
5164 rtx old_val, new_val, mem;
5165 rtx *pbool, *poval;
5166
5167 /* Expand the operands. */
5168 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5169 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5170 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5171
5172 pbool = poval = NULL;
5173 if (target != const0_rtx)
5174 {
5175 if (is_bool)
5176 pbool = &target;
5177 else
5178 poval = &target;
5179 }
5180 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5181 false, MEMMODEL_SEQ_CST,
5182 MEMMODEL_SEQ_CST))
5183 return NULL_RTX;
5184
5185 return target;
5186 }
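/* Usage sketch (illustrative): the two source-level forms share this
   expander and differ only in which CAS output they keep:

       ok  = __sync_bool_compare_and_swap (&x, oldv, newv); // IS_BOOL
       was = __sync_val_compare_and_swap (&x, oldv, newv);  // !IS_BOOL

   Both expand to a strong compare-and-swap with MEMMODEL_SEQ_CST on the
   success and failure paths.  */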
5187
5188 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5189 general form is actually an atomic exchange, and some targets only
5190 support a reduced form with the second argument being a constant 1.
5191 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5192 the results. */
5193
5194 static rtx
5195 expand_builtin_sync_lock_test_and_set (enum machine_mode mode, tree exp,
5196 rtx target)
5197 {
5198 rtx val, mem;
5199
5200 /* Expand the operands. */
5201 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5202 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5203
5204 return expand_sync_lock_test_and_set (target, mem, val);
5205 }
5206
5207 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5208
5209 static void
5210 expand_builtin_sync_lock_release (enum machine_mode mode, tree exp)
5211 {
5212 rtx mem;
5213
5214 /* Expand the operands. */
5215 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5216
5217 expand_atomic_store (mem, const0_rtx, MEMMODEL_RELEASE, true);
5218 }
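/* Usage sketch (illustrative): the classic pairing of the two builtins
   above is a simple spin lock; the release side is just the atomic store
   of zero with MEMMODEL_RELEASE expanded here:

       static int lock;
       while (__sync_lock_test_and_set (&lock, 1))   // acquire
         ;                                           // spin
       // ... critical section ...
       __sync_lock_release (&lock);                  // release
*/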
5219
5220 /* Given an integer representing an ``enum memmodel'', verify its
5221 correctness and return the memory model enum. */
5222
5223 static enum memmodel
5224 get_memmodel (tree exp)
5225 {
5226 rtx op;
5227 unsigned HOST_WIDE_INT val;
5228
5229 /* If the parameter is not a constant, it's a run time value so we'll just
5230 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5231 if (TREE_CODE (exp) != INTEGER_CST)
5232 return MEMMODEL_SEQ_CST;
5233
5234 op = expand_normal (exp);
5235
5236 val = INTVAL (op);
5237 if (targetm.memmodel_check)
5238 val = targetm.memmodel_check (val);
5239 else if (val & ~MEMMODEL_MASK)
5240 {
5241 warning (OPT_Winvalid_memory_model,
5242 "unknown architecture specifier in memory model to builtin");
5243 return MEMMODEL_SEQ_CST;
5244 }
5245
5246 if ((INTVAL (op) & MEMMODEL_MASK) >= MEMMODEL_LAST)
5247 {
5248 warning (OPT_Winvalid_memory_model,
5249 "invalid memory model argument to builtin");
5250 return MEMMODEL_SEQ_CST;
5251 }
5252
5253 return (enum memmodel) val;
5254 }
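/* For example (editorial note): a call such as

       __atomic_load_n (p, __ATOMIC_ACQUIRE);

   arrives here with EXP being the INTEGER_CST 2, the value of
   __ATOMIC_ACQUIRE in the C++11-style enumeration.  Bits above
   MEMMODEL_MASK are reserved for target extensions, e.g. x86's HLE
   hints, which is why targetm.memmodel_check gets the first say.  */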
5255
5256 /* Expand the __atomic_exchange intrinsic:
5257 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5258 EXP is the CALL_EXPR.
5259 TARGET is an optional place for us to store the results. */
5260
5261 static rtx
5262 expand_builtin_atomic_exchange (enum machine_mode mode, tree exp, rtx target)
5263 {
5264 rtx val, mem;
5265 enum memmodel model;
5266
5267 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5268 if ((model & MEMMODEL_MASK) == MEMMODEL_CONSUME)
5269 {
5270 error ("invalid memory model for %<__atomic_exchange%>");
5271 return NULL_RTX;
5272 }
5273
5274 if (!flag_inline_atomics)
5275 return NULL_RTX;
5276
5277 /* Expand the operands. */
5278 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5279 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5280
5281 return expand_atomic_exchange (target, mem, val, model);
5282 }
5283
5284 /* Expand the __atomic_compare_exchange intrinsic:
5285 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5286 TYPE desired, BOOL weak,
5287 enum memmodel success,
5288 enum memmodel failure)
5289 EXP is the CALL_EXPR.
5290 TARGET is an optional place for us to store the results. */
5291
5292 static rtx
5293 expand_builtin_atomic_compare_exchange (enum machine_mode mode, tree exp,
5294 rtx target)
5295 {
5296 rtx expect, desired, mem, oldval;
5297 enum memmodel success, failure;
5298 tree weak;
5299 bool is_weak;
5300
5301 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5302 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5303
5304 if ((failure & MEMMODEL_MASK) == MEMMODEL_RELEASE
5305 || (failure & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5306 {
5307 error ("invalid failure memory model for %<__atomic_compare_exchange%>");
5308 return NULL_RTX;
5309 }
5310
5311 if (failure > success)
5312 {
5313 error ("failure memory model cannot be stronger than success "
5314 "memory model for %<__atomic_compare_exchange%>");
5315 return NULL_RTX;
5316 }
5317
5318 if (!flag_inline_atomics)
5319 return NULL_RTX;
5320
5321 /* Expand the operands. */
5322 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5323
5324 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5325 expect = convert_memory_address (Pmode, expect);
5326 expect = gen_rtx_MEM (mode, expect);
5327 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5328
5329 weak = CALL_EXPR_ARG (exp, 3);
5330 is_weak = false;
5331 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5332 is_weak = true;
5333
5334 oldval = expect;
5335 if (!expand_atomic_compare_and_swap ((target == const0_rtx ? NULL : &target),
5336 &oldval, mem, oldval, desired,
5337 is_weak, success, failure))
5338 return NULL_RTX;
5339
5340 if (oldval != expect)
5341 emit_move_insn (expect, oldval);
5342
5343 return target;
5344 }
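/* Usage sketch (illustrative): the emit_move_insn above is what updates
   the caller's expected value when the exchange fails:

       int expected = 0;
       if (!__atomic_compare_exchange_n (&lock, &expected, 1,
                                         false,              // strong
                                         __ATOMIC_ACQUIRE,   // success order
                                         __ATOMIC_RELAXED))  // failure order
         ;  // 'expected' now holds the value actually seen in 'lock'.
*/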
5345
5346 /* Expand the __atomic_load intrinsic:
5347 TYPE __atomic_load (TYPE *object, enum memmodel)
5348 EXP is the CALL_EXPR.
5349 TARGET is an optional place for us to store the results. */
5350
5351 static rtx
5352 expand_builtin_atomic_load (enum machine_mode mode, tree exp, rtx target)
5353 {
5354 rtx mem;
5355 enum memmodel model;
5356
5357 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5358 if ((model & MEMMODEL_MASK) == MEMMODEL_RELEASE
5359 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5360 {
5361 error ("invalid memory model for %<__atomic_load%>");
5362 return NULL_RTX;
5363 }
5364
5365 if (!flag_inline_atomics)
5366 return NULL_RTX;
5367
5368 /* Expand the operand. */
5369 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5370
5371 return expand_atomic_load (target, mem, model);
5372 }
5373
5374
5375 /* Expand the __atomic_store intrinsic:
5376 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5377 EXP is the CALL_EXPR.
5378 TARGET is an optional place for us to store the results. */
5379
5380 static rtx
5381 expand_builtin_atomic_store (enum machine_mode mode, tree exp)
5382 {
5383 rtx mem, val;
5384 enum memmodel model;
5385
5386 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5387 if ((model & MEMMODEL_MASK) != MEMMODEL_RELAXED
5388 && (model & MEMMODEL_MASK) != MEMMODEL_SEQ_CST
5389 && (model & MEMMODEL_MASK) != MEMMODEL_RELEASE)
5390 {
5391 error ("invalid memory model for %<__atomic_store%>");
5392 return NULL_RTX;
5393 }
5394
5395 if (!flag_inline_atomics)
5396 return NULL_RTX;
5397
5398 /* Expand the operands. */
5399 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5400 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5401
5402 return expand_atomic_store (mem, val, model, false);
5403 }
5404
5405 /* Expand the __atomic_fetch_XXX intrinsic:
5406 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5407 EXP is the CALL_EXPR.
5408 TARGET is an optional place for us to store the results.
5409 CODE is the rtx code for the operation: PLUS, MINUS, AND, XOR, IOR, or NOT (meaning NAND).
5410 FETCH_AFTER is true if returning the result of the operation.
5411 FETCH_AFTER is false if returning the value before the operation.
5412 IGNORE is true if the result is not used.
5413 EXT_CALL is the correct builtin for an external call if this cannot be
5414 resolved to an instruction sequence. */
5415
5416 static rtx
5417 expand_builtin_atomic_fetch_op (enum machine_mode mode, tree exp, rtx target,
5418 enum rtx_code code, bool fetch_after,
5419 bool ignore, enum built_in_function ext_call)
5420 {
5421 rtx val, mem, ret;
5422 enum memmodel model;
5423 tree fndecl;
5424 tree addr;
5425
5426 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5427
5428 /* Expand the operands. */
5429 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5430 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5431
5432 /* Only try generating instructions if inlining is turned on. */
5433 if (flag_inline_atomics)
5434 {
5435 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5436 if (ret)
5437 return ret;
5438 }
5439
5440 /* If no different library routine is needed, return and let the normal library call be emitted. */
5441 if (ext_call == BUILT_IN_NONE)
5442 return NULL_RTX;
5443
5444 /* Change the call to the specified function. */
5445 fndecl = get_callee_fndecl (exp);
5446 addr = CALL_EXPR_FN (exp);
5447 STRIP_NOPS (addr);
5448
5449 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5450 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
5451
5452 /* Expand the call here so we can emit trailing code. */
5453 ret = expand_call (exp, target, ignore);
5454
5455 /* Replace the original function just in case it matters. */
5456 TREE_OPERAND (addr, 0) = fndecl;
5457
5458 /* Then issue the arithmetic correction to return the right result. */
5459 if (!ignore)
5460 {
5461 if (code == NOT)
5462 {
5463 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5464 OPTAB_LIB_WIDEN);
5465 ret = expand_simple_unop (mode, NOT, ret, target, true);
5466 }
5467 else
5468 ret = expand_simple_binop (mode, code, ret, val, target, true,
5469 OPTAB_LIB_WIDEN);
5470 }
5471 return ret;
5472 }
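/* Worked example (editorial note): if __atomic_add_fetch_4 cannot be
   inlined, it is redirected to the library routine __atomic_fetch_add_4,
   which returns the PRE-operation value; the correction above then
   recomputes the post-operation result as ret + val.  For NAND the
   correction is ret = ~(ret & val), which is what the AND followed by
   NOT computes.  */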
5473
5474
5475 #ifndef HAVE_atomic_clear
5476 # define HAVE_atomic_clear 0
5477 # define gen_atomic_clear(x,y) (gcc_unreachable (), NULL_RTX)
5478 #endif
5479
5480 /* Expand an atomic clear operation.
5481 void __atomic_clear (BOOL *obj, enum memmodel)
5482 EXP is the call expression. */
5483
5484 static rtx
5485 expand_builtin_atomic_clear (tree exp)
5486 {
5487 enum machine_mode mode;
5488 rtx mem, ret;
5489 enum memmodel model;
5490
5491 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5492 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5493 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5494
5495 if ((model & MEMMODEL_MASK) == MEMMODEL_ACQUIRE
5496 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5497 {
5498 error ("invalid memory model for %<__atomic_store%>");
5499 return const0_rtx;
5500 }
5501
5502 if (HAVE_atomic_clear)
5503 {
5504 emit_insn (gen_atomic_clear (mem, model));
5505 return const0_rtx;
5506 }
5507
5508 /* Try issuing an __atomic_store, allowing fallback to a
5509 __sync_lock_release pattern; failing that, a plain store is emitted
5510 below. The only way this can fail is if the bool type is larger than a
5511 word size. Unlikely, but handle it anyway for completeness. Assume a
5512 single threaded model, since there is no atomic support in this case and no barriers are required. */
5513 ret = expand_atomic_store (mem, const0_rtx, model, true);
5514 if (!ret)
5515 emit_move_insn (mem, const0_rtx);
5516 return const0_rtx;
5517 }
5518
5519 /* Expand an atomic test_and_set operation.
5520 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
5521 EXP is the call expression. */
5522
5523 static rtx
5524 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5525 {
5526 rtx mem;
5527 enum memmodel model;
5528 enum machine_mode mode;
5529
5530 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5531 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5532 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5533
5534 return expand_atomic_test_and_set (target, mem, model);
5535 }
5536
5537
5538 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5539 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5540
5541 static tree
5542 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5543 {
5544 int size;
5545 enum machine_mode mode;
5546 unsigned int mode_align, type_align;
5547
5548 if (TREE_CODE (arg0) != INTEGER_CST)
5549 return NULL_TREE;
5550
5551 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5552 mode = mode_for_size (size, MODE_INT, 0);
5553 mode_align = GET_MODE_ALIGNMENT (mode);
5554
5555 if (TREE_CODE (arg1) == INTEGER_CST && INTVAL (expand_normal (arg1)) == 0)
5556 type_align = mode_align;
5557 else
5558 {
5559 tree ttype = TREE_TYPE (arg1);
5560
5561 /* This function is usually invoked and folded immediately by the front
5562 end before anything else has a chance to look at it. The pointer
5563 parameter at this point is usually cast to a void *, so check for that
5564 and look past the cast. */
5565 if (TREE_CODE (arg1) == NOP_EXPR && POINTER_TYPE_P (ttype)
5566 && VOID_TYPE_P (TREE_TYPE (ttype)))
5567 arg1 = TREE_OPERAND (arg1, 0);
5568
5569 ttype = TREE_TYPE (arg1);
5570 gcc_assert (POINTER_TYPE_P (ttype));
5571
5572 /* Get the underlying type of the object. */
5573 ttype = TREE_TYPE (ttype);
5574 type_align = TYPE_ALIGN (ttype);
5575 }
5576
5577 /* If the object has smaller alignment, the lock free routines cannot
5578 be used. */
5579 if (type_align < mode_align)
5580 return boolean_false_node;
5581
5582 /* Check if a compare_and_swap pattern exists for the mode which represents
5583 the required size. The pattern is not allowed to fail, so the existence
5584 of the pattern indicates support is present. */
5585 if (can_compare_and_swap_p (mode, true))
5586 return boolean_true_node;
5587 else
5588 return boolean_false_node;
5589 }
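/* For example (illustrative, target-dependent):

       __atomic_always_lock_free (4, 0)

   folds to true wherever a 32-bit compare_and_swap pattern exists, since
   a null object pointer selects the mode's natural alignment; querying a
   pointer to an under-aligned (e.g. packed) object instead can fold to
   false through the type_align < mode_align test above.  */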
5590
5591 /* Return const1_rtx if the parameters to call EXP represent an object which
5592 will always generate lock free instructions. The first argument represents
5593 the size of the object, and the second parameter is a pointer to the object
5594 itself. If NULL is passed for the object, then the result is based on
5595 typical alignment for an object of the specified size. Otherwise return
5596 const0_rtx. */
5597
5598 static rtx
5599 expand_builtin_atomic_always_lock_free (tree exp)
5600 {
5601 tree size;
5602 tree arg0 = CALL_EXPR_ARG (exp, 0);
5603 tree arg1 = CALL_EXPR_ARG (exp, 1);
5604
5605 if (TREE_CODE (arg0) != INTEGER_CST)
5606 {
5607 error ("non-constant argument 1 to __atomic_always_lock_free");
5608 return const0_rtx;
5609 }
5610
5611 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5612 if (size == boolean_true_node)
5613 return const1_rtx;
5614 return const0_rtx;
5615 }
5616
5617 /* Return boolean_true_node if it can be determined that object ARG1 of
5618 size ARG0 is lock free on this architecture, and NULL_TREE otherwise. */
5619
5620 static tree
5621 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5622 {
5623 if (!flag_inline_atomics)
5624 return NULL_TREE;
5625
5626 /* If it isn't always lock free, don't generate a result. */
5627 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5628 return boolean_true_node;
5629
5630 return NULL_TREE;
5631 }
5632
5633 /* Return const1_rtx if the parameters to call EXP describe an object which
5634 is known to be lock free. The first argument represents the
5635 size of the object, and the second parameter is a pointer to the object
5636 itself. If NULL is passed for the object, then the result is based on
5637 typical alignment for an object of the specified size. Otherwise
5638 return NULL_RTX so that a runtime call is emitted instead. */
5639
5640 static rtx
5641 expand_builtin_atomic_is_lock_free (tree exp)
5642 {
5643 tree size;
5644 tree arg0 = CALL_EXPR_ARG (exp, 0);
5645 tree arg1 = CALL_EXPR_ARG (exp, 1);
5646
5647 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5648 {
5649 error ("non-integer argument 1 to __atomic_is_lock_free");
5650 return NULL_RTX;
5651 }
5652
5653 if (!flag_inline_atomics)
5654 return NULL_RTX;
5655
5656 /* If the value is known at compile time, return the RTX for it. */
5657 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5658 if (size == boolean_true_node)
5659 return const1_rtx;
5660
5661 return NULL_RTX;
5662 }
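/* Note (editorial): NULL_RTX here does not mean "not lock free"; it makes
   the caller fall back to a call to the runtime routine
   __atomic_is_lock_free in libatomic, which can take dynamic information
   such as the actual object address into account.  */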
5663
5664 /* Expand the __atomic_thread_fence intrinsic:
5665 void __atomic_thread_fence (enum memmodel)
5666 EXP is the CALL_EXPR. */
5667
5668 static void
5669 expand_builtin_atomic_thread_fence (tree exp)
5670 {
5671 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5672 expand_mem_thread_fence (model);
5673 }
5674
5675 /* Expand the __atomic_signal_fence intrinsic:
5676 void __atomic_signal_fence (enum memmodel)
5677 EXP is the CALL_EXPR. */
5678
5679 static void
5680 expand_builtin_atomic_signal_fence (tree exp)
5681 {
5682 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5683 expand_mem_signal_fence (model);
5684 }
5685
5686 /* Expand the __sync_synchronize intrinsic. */
5687
5688 static void
5689 expand_builtin_sync_synchronize (void)
5690 {
5691 expand_mem_thread_fence (MEMMODEL_SEQ_CST);
5692 }
5693
5694 static rtx
5695 expand_builtin_thread_pointer (tree exp, rtx target)
5696 {
5697 enum insn_code icode;
5698 if (!validate_arglist (exp, VOID_TYPE))
5699 return const0_rtx;
5700 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5701 if (icode != CODE_FOR_nothing)
5702 {
5703 struct expand_operand op;
5704 if (!REG_P (target) || GET_MODE (target) != Pmode)
5705 target = gen_reg_rtx (Pmode);
5706 create_output_operand (&op, target, Pmode);
5707 expand_insn (icode, 1, &op);
5708 return target;
5709 }
5710 error ("__builtin_thread_pointer is not supported on this target");
5711 return const0_rtx;
5712 }
5713
5714 static void
5715 expand_builtin_set_thread_pointer (tree exp)
5716 {
5717 enum insn_code icode;
5718 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5719 return;
5720 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5721 if (icode != CODE_FOR_nothing)
5722 {
5723 struct expand_operand op;
5724 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5725 Pmode, EXPAND_NORMAL);
5726 create_input_operand (&op, val, Pmode);
5727 expand_insn (icode, 1, &op);
5728 return;
5729 }
5730 error ("__builtin_set_thread_pointer is not supported on this target");
5731 }
5732
5733 \f
5734 /* Emit code to restore the current value of stack. */
5735
5736 static void
5737 expand_stack_restore (tree var)
5738 {
5739 rtx prev, sa = expand_normal (var);
5740
5741 sa = convert_memory_address (Pmode, sa);
5742
5743 prev = get_last_insn ();
5744 emit_stack_restore (SAVE_BLOCK, sa);
5745 fixup_args_size_notes (prev, get_last_insn (), 0);
5746 }
5747
5748
5749 /* Emit code to save the current value of stack. */
5750
5751 static rtx
5752 expand_stack_save (void)
5753 {
5754 rtx ret = NULL_RTX;
5755
5756 do_pending_stack_adjust ();
5757 emit_stack_save (SAVE_BLOCK, &ret);
5758 return ret;
5759 }
5760
5761 /* Expand an expression EXP that calls a built-in function,
5762 with result going to TARGET if that's convenient
5763 (and in mode MODE if that's convenient).
5764 SUBTARGET may be used as the target for computing one of EXP's operands.
5765 IGNORE is nonzero if the value is to be ignored. */
5766
5767 rtx
5768 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5769 int ignore)
5770 {
5771 tree fndecl = get_callee_fndecl (exp);
5772 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5773 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5774 int flags;
5775
5776 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5777 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5778
5779 /* When not optimizing, generate calls to library functions for a certain
5780 set of builtins. */
5781 if (!optimize
5782 && !called_as_built_in (fndecl)
5783 && fcode != BUILT_IN_FORK
5784 && fcode != BUILT_IN_EXECL
5785 && fcode != BUILT_IN_EXECV
5786 && fcode != BUILT_IN_EXECLP
5787 && fcode != BUILT_IN_EXECLE
5788 && fcode != BUILT_IN_EXECVP
5789 && fcode != BUILT_IN_EXECVE
5790 && fcode != BUILT_IN_ALLOCA
5791 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5792 && fcode != BUILT_IN_FREE)
5793 return expand_call (exp, target, ignore);
5794
5795 /* The built-in function expanders test for target == const0_rtx
5796 to determine whether the function's result will be ignored. */
5797 if (ignore)
5798 target = const0_rtx;
5799
5800 /* If the result of a pure or const built-in function is ignored, and
5801 none of its arguments are volatile, we can avoid expanding the
5802 built-in call and just evaluate the arguments for side-effects. */
5803 if (target == const0_rtx
5804 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5805 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5806 {
5807 bool volatilep = false;
5808 tree arg;
5809 call_expr_arg_iterator iter;
5810
5811 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5812 if (TREE_THIS_VOLATILE (arg))
5813 {
5814 volatilep = true;
5815 break;
5816 }
5817
5818 if (! volatilep)
5819 {
5820 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5821 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5822 return const0_rtx;
5823 }
5824 }
5825
5826 switch (fcode)
5827 {
5828 CASE_FLT_FN (BUILT_IN_FABS):
5829 case BUILT_IN_FABSD32:
5830 case BUILT_IN_FABSD64:
5831 case BUILT_IN_FABSD128:
5832 target = expand_builtin_fabs (exp, target, subtarget);
5833 if (target)
5834 return target;
5835 break;
5836
5837 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5838 target = expand_builtin_copysign (exp, target, subtarget);
5839 if (target)
5840 return target;
5841 break;
5842
5843 /* Just do a normal library call if we were unable to fold
5844 the values. */
5845 CASE_FLT_FN (BUILT_IN_CABS):
5846 break;
5847
5848 CASE_FLT_FN (BUILT_IN_EXP):
5849 CASE_FLT_FN (BUILT_IN_EXP10):
5850 CASE_FLT_FN (BUILT_IN_POW10):
5851 CASE_FLT_FN (BUILT_IN_EXP2):
5852 CASE_FLT_FN (BUILT_IN_EXPM1):
5853 CASE_FLT_FN (BUILT_IN_LOGB):
5854 CASE_FLT_FN (BUILT_IN_LOG):
5855 CASE_FLT_FN (BUILT_IN_LOG10):
5856 CASE_FLT_FN (BUILT_IN_LOG2):
5857 CASE_FLT_FN (BUILT_IN_LOG1P):
5858 CASE_FLT_FN (BUILT_IN_TAN):
5859 CASE_FLT_FN (BUILT_IN_ASIN):
5860 CASE_FLT_FN (BUILT_IN_ACOS):
5861 CASE_FLT_FN (BUILT_IN_ATAN):
5862 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5863 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5864 because of possible accuracy problems. */
5865 if (! flag_unsafe_math_optimizations)
5866 break;
5867 CASE_FLT_FN (BUILT_IN_SQRT):
5868 CASE_FLT_FN (BUILT_IN_FLOOR):
5869 CASE_FLT_FN (BUILT_IN_CEIL):
5870 CASE_FLT_FN (BUILT_IN_TRUNC):
5871 CASE_FLT_FN (BUILT_IN_ROUND):
5872 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5873 CASE_FLT_FN (BUILT_IN_RINT):
5874 target = expand_builtin_mathfn (exp, target, subtarget);
5875 if (target)
5876 return target;
5877 break;
5878
5879 CASE_FLT_FN (BUILT_IN_FMA):
5880 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
5881 if (target)
5882 return target;
5883 break;
5884
5885 CASE_FLT_FN (BUILT_IN_ILOGB):
5886 if (! flag_unsafe_math_optimizations)
5887 break;
5888 CASE_FLT_FN (BUILT_IN_ISINF):
5889 CASE_FLT_FN (BUILT_IN_FINITE):
5890 case BUILT_IN_ISFINITE:
5891 case BUILT_IN_ISNORMAL:
5892 target = expand_builtin_interclass_mathfn (exp, target);
5893 if (target)
5894 return target;
5895 break;
5896
5897 CASE_FLT_FN (BUILT_IN_ICEIL):
5898 CASE_FLT_FN (BUILT_IN_LCEIL):
5899 CASE_FLT_FN (BUILT_IN_LLCEIL):
5900 CASE_FLT_FN (BUILT_IN_LFLOOR):
5901 CASE_FLT_FN (BUILT_IN_IFLOOR):
5902 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5903 target = expand_builtin_int_roundingfn (exp, target);
5904 if (target)
5905 return target;
5906 break;
5907
5908 CASE_FLT_FN (BUILT_IN_IRINT):
5909 CASE_FLT_FN (BUILT_IN_LRINT):
5910 CASE_FLT_FN (BUILT_IN_LLRINT):
5911 CASE_FLT_FN (BUILT_IN_IROUND):
5912 CASE_FLT_FN (BUILT_IN_LROUND):
5913 CASE_FLT_FN (BUILT_IN_LLROUND):
5914 target = expand_builtin_int_roundingfn_2 (exp, target);
5915 if (target)
5916 return target;
5917 break;
5918
5919 CASE_FLT_FN (BUILT_IN_POWI):
5920 target = expand_builtin_powi (exp, target);
5921 if (target)
5922 return target;
5923 break;
5924
5925 CASE_FLT_FN (BUILT_IN_ATAN2):
5926 CASE_FLT_FN (BUILT_IN_LDEXP):
5927 CASE_FLT_FN (BUILT_IN_SCALB):
5928 CASE_FLT_FN (BUILT_IN_SCALBN):
5929 CASE_FLT_FN (BUILT_IN_SCALBLN):
5930 if (! flag_unsafe_math_optimizations)
5931 break;
5932
5933 CASE_FLT_FN (BUILT_IN_FMOD):
5934 CASE_FLT_FN (BUILT_IN_REMAINDER):
5935 CASE_FLT_FN (BUILT_IN_DREM):
5936 CASE_FLT_FN (BUILT_IN_POW):
5937 target = expand_builtin_mathfn_2 (exp, target, subtarget);
5938 if (target)
5939 return target;
5940 break;
5941
5942 CASE_FLT_FN (BUILT_IN_CEXPI):
5943 target = expand_builtin_cexpi (exp, target);
5944 gcc_assert (target);
5945 return target;
5946
5947 CASE_FLT_FN (BUILT_IN_SIN):
5948 CASE_FLT_FN (BUILT_IN_COS):
5949 if (! flag_unsafe_math_optimizations)
5950 break;
5951 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5952 if (target)
5953 return target;
5954 break;
5955
5956 CASE_FLT_FN (BUILT_IN_SINCOS):
5957 if (! flag_unsafe_math_optimizations)
5958 break;
5959 target = expand_builtin_sincos (exp);
5960 if (target)
5961 return target;
5962 break;
5963
5964 case BUILT_IN_APPLY_ARGS:
5965 return expand_builtin_apply_args ();
5966
5967 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5968 FUNCTION with a copy of the parameters described by
5969 ARGUMENTS, and ARGSIZE. It returns a block of memory
5970 allocated on the stack into which is stored all the registers
5971 that might possibly be used for returning the result of a
5972 function. ARGUMENTS is the value returned by
5973 __builtin_apply_args. ARGSIZE is the number of bytes of
5974 arguments that must be copied. ??? How should this value be
5975 computed? We'll also need a safe worst case value for varargs
5976 functions. */
5977 case BUILT_IN_APPLY:
5978 if (!validate_arglist (exp, POINTER_TYPE,
5979 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5980 && !validate_arglist (exp, REFERENCE_TYPE,
5981 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5982 return const0_rtx;
5983 else
5984 {
5985 rtx ops[3];
5986
5987 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
5988 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
5989 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
5990
5991 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5992 }
5993
5994 /* __builtin_return (RESULT) causes the function to return the
5995 value described by RESULT. RESULT is address of the block of
5996 memory returned by __builtin_apply. */
5997 case BUILT_IN_RETURN:
5998 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5999 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6000 return const0_rtx;
6001
6002 case BUILT_IN_SAVEREGS:
6003 return expand_builtin_saveregs ();
6004
6005 case BUILT_IN_VA_ARG_PACK:
6006 /* All valid uses of __builtin_va_arg_pack () are removed during
6007 inlining. */
6008 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6009 return const0_rtx;
6010
6011 case BUILT_IN_VA_ARG_PACK_LEN:
6012 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6013 inlining. */
6014 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6015 return const0_rtx;
6016
6017 /* Return the address of the first anonymous stack arg. */
6018 case BUILT_IN_NEXT_ARG:
6019 if (fold_builtin_next_arg (exp, false))
6020 return const0_rtx;
6021 return expand_builtin_next_arg ();
6022
6023 case BUILT_IN_CLEAR_CACHE:
6024 target = expand_builtin___clear_cache (exp);
6025 if (target)
6026 return target;
6027 break;
6028
6029 case BUILT_IN_CLASSIFY_TYPE:
6030 return expand_builtin_classify_type (exp);
6031
6032 case BUILT_IN_CONSTANT_P:
6033 return const0_rtx;
6034
6035 case BUILT_IN_FRAME_ADDRESS:
6036 case BUILT_IN_RETURN_ADDRESS:
6037 return expand_builtin_frame_address (fndecl, exp);
6038
6039 /* Returns the address of the area where the structure is returned.
6040 0 otherwise. */
6041 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6042 if (call_expr_nargs (exp) != 0
6043 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6044 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6045 return const0_rtx;
6046 else
6047 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6048
6049 case BUILT_IN_ALLOCA:
6050 case BUILT_IN_ALLOCA_WITH_ALIGN:
6051 /* If the allocation stems from the declaration of a variable-sized
6052 object, it cannot accumulate. */
6053 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6054 if (target)
6055 return target;
6056 break;
6057
6058 case BUILT_IN_STACK_SAVE:
6059 return expand_stack_save ();
6060
6061 case BUILT_IN_STACK_RESTORE:
6062 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6063 return const0_rtx;
6064
6065 case BUILT_IN_BSWAP16:
6066 case BUILT_IN_BSWAP32:
6067 case BUILT_IN_BSWAP64:
6068 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6069 if (target)
6070 return target;
6071 break;
6072
6073 CASE_INT_FN (BUILT_IN_FFS):
6074 target = expand_builtin_unop (target_mode, exp, target,
6075 subtarget, ffs_optab);
6076 if (target)
6077 return target;
6078 break;
6079
6080 CASE_INT_FN (BUILT_IN_CLZ):
6081 target = expand_builtin_unop (target_mode, exp, target,
6082 subtarget, clz_optab);
6083 if (target)
6084 return target;
6085 break;
6086
6087 CASE_INT_FN (BUILT_IN_CTZ):
6088 target = expand_builtin_unop (target_mode, exp, target,
6089 subtarget, ctz_optab);
6090 if (target)
6091 return target;
6092 break;
6093
6094 CASE_INT_FN (BUILT_IN_CLRSB):
6095 target = expand_builtin_unop (target_mode, exp, target,
6096 subtarget, clrsb_optab);
6097 if (target)
6098 return target;
6099 break;
6100
6101 CASE_INT_FN (BUILT_IN_POPCOUNT):
6102 target = expand_builtin_unop (target_mode, exp, target,
6103 subtarget, popcount_optab);
6104 if (target)
6105 return target;
6106 break;
6107
6108 CASE_INT_FN (BUILT_IN_PARITY):
6109 target = expand_builtin_unop (target_mode, exp, target,
6110 subtarget, parity_optab);
6111 if (target)
6112 return target;
6113 break;
6114
6115 case BUILT_IN_STRLEN:
6116 target = expand_builtin_strlen (exp, target, target_mode);
6117 if (target)
6118 return target;
6119 break;
6120
6121 case BUILT_IN_STRCPY:
6122 target = expand_builtin_strcpy (exp, target);
6123 if (target)
6124 return target;
6125 break;
6126
6127 case BUILT_IN_STRNCPY:
6128 target = expand_builtin_strncpy (exp, target);
6129 if (target)
6130 return target;
6131 break;
6132
6133 case BUILT_IN_STPCPY:
6134 target = expand_builtin_stpcpy (exp, target, mode);
6135 if (target)
6136 return target;
6137 break;
6138
6139 case BUILT_IN_MEMCPY:
6140 target = expand_builtin_memcpy (exp, target);
6141 if (target)
6142 return target;
6143 break;
6144
6145 case BUILT_IN_MEMPCPY:
6146 target = expand_builtin_mempcpy (exp, target, mode);
6147 if (target)
6148 return target;
6149 break;
6150
6151 case BUILT_IN_MEMSET:
6152 target = expand_builtin_memset (exp, target, mode);
6153 if (target)
6154 return target;
6155 break;
6156
6157 case BUILT_IN_BZERO:
6158 target = expand_builtin_bzero (exp);
6159 if (target)
6160 return target;
6161 break;
6162
6163 case BUILT_IN_STRCMP:
6164 target = expand_builtin_strcmp (exp, target);
6165 if (target)
6166 return target;
6167 break;
6168
6169 case BUILT_IN_STRNCMP:
6170 target = expand_builtin_strncmp (exp, target, mode);
6171 if (target)
6172 return target;
6173 break;
6174
6175 case BUILT_IN_BCMP:
6176 case BUILT_IN_MEMCMP:
6177 target = expand_builtin_memcmp (exp, target, mode);
6178 if (target)
6179 return target;
6180 break;
6181
6182 case BUILT_IN_SETJMP:
6183 /* This should have been lowered to the builtins below. */
6184 gcc_unreachable ();
6185
6186 case BUILT_IN_SETJMP_SETUP:
6187 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6188 and the receiver label. */
6189 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6190 {
6191 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6192 VOIDmode, EXPAND_NORMAL);
6193 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6194 rtx label_r = label_rtx (label);
6195
6196 /* This is copied from the handling of non-local gotos. */
6197 expand_builtin_setjmp_setup (buf_addr, label_r);
6198 nonlocal_goto_handler_labels
6199 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6200 nonlocal_goto_handler_labels);
6201 /* ??? Do not let expand_label treat us as such since we would
6202 not want to be both on the list of non-local labels and on
6203 the list of forced labels. */
6204 FORCED_LABEL (label) = 0;
6205 return const0_rtx;
6206 }
6207 break;
6208
6209 case BUILT_IN_SETJMP_DISPATCHER:
6210 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6211 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6212 {
6213 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6214 rtx label_r = label_rtx (label);
6215
6216 /* Remove the dispatcher label from the list of non-local labels
6217 since the receiver labels have been added to it above. */
6218 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6219 return const0_rtx;
6220 }
6221 break;
6222
6223 case BUILT_IN_SETJMP_RECEIVER:
6224 /* __builtin_setjmp_receiver is passed the receiver label. */
6225 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6226 {
6227 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6228 rtx label_r = label_rtx (label);
6229
6230 expand_builtin_setjmp_receiver (label_r);
6231 return const0_rtx;
6232 }
6233 break;
6234
6235 /* __builtin_longjmp is passed a pointer to an array of five words.
6236 It's similar to the C library longjmp function but works with
6237 __builtin_setjmp above. */
6238 case BUILT_IN_LONGJMP:
6239 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6240 {
6241 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6242 VOIDmode, EXPAND_NORMAL);
6243 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6244
6245 if (value != const1_rtx)
6246 {
6247 error ("%<__builtin_longjmp%> second argument must be 1");
6248 return const0_rtx;
6249 }
6250
6251 expand_builtin_longjmp (buf_addr, value);
6252 return const0_rtx;
6253 }
6254 break;
6255
6256 case BUILT_IN_NONLOCAL_GOTO:
6257 target = expand_builtin_nonlocal_goto (exp);
6258 if (target)
6259 return target;
6260 break;
6261
6262 /* This updates the setjmp buffer that is its argument with the value
6263 of the current stack pointer. */
6264 case BUILT_IN_UPDATE_SETJMP_BUF:
6265 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6266 {
6267 rtx buf_addr
6268 = expand_normal (CALL_EXPR_ARG (exp, 0));
6269
6270 expand_builtin_update_setjmp_buf (buf_addr);
6271 return const0_rtx;
6272 }
6273 break;
6274
6275 case BUILT_IN_TRAP:
6276 expand_builtin_trap ();
6277 return const0_rtx;
6278
6279 case BUILT_IN_UNREACHABLE:
6280 expand_builtin_unreachable ();
6281 return const0_rtx;
6282
6283 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6284 case BUILT_IN_SIGNBITD32:
6285 case BUILT_IN_SIGNBITD64:
6286 case BUILT_IN_SIGNBITD128:
6287 target = expand_builtin_signbit (exp, target);
6288 if (target)
6289 return target;
6290 break;
6291
6292 /* Various hooks for the DWARF 2 __throw routine. */
6293 case BUILT_IN_UNWIND_INIT:
6294 expand_builtin_unwind_init ();
6295 return const0_rtx;
6296 case BUILT_IN_DWARF_CFA:
6297 return virtual_cfa_rtx;
6298 #ifdef DWARF2_UNWIND_INFO
6299 case BUILT_IN_DWARF_SP_COLUMN:
6300 return expand_builtin_dwarf_sp_column ();
6301 case BUILT_IN_INIT_DWARF_REG_SIZES:
6302 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6303 return const0_rtx;
6304 #endif
6305 case BUILT_IN_FROB_RETURN_ADDR:
6306 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6307 case BUILT_IN_EXTRACT_RETURN_ADDR:
6308 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6309 case BUILT_IN_EH_RETURN:
6310 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6311 CALL_EXPR_ARG (exp, 1));
6312 return const0_rtx;
6313 #ifdef EH_RETURN_DATA_REGNO
6314 case BUILT_IN_EH_RETURN_DATA_REGNO:
6315 return expand_builtin_eh_return_data_regno (exp);
6316 #endif
6317 case BUILT_IN_EXTEND_POINTER:
6318 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6319 case BUILT_IN_EH_POINTER:
6320 return expand_builtin_eh_pointer (exp);
6321 case BUILT_IN_EH_FILTER:
6322 return expand_builtin_eh_filter (exp);
6323 case BUILT_IN_EH_COPY_VALUES:
6324 return expand_builtin_eh_copy_values (exp);
6325
6326 case BUILT_IN_VA_START:
6327 return expand_builtin_va_start (exp);
6328 case BUILT_IN_VA_END:
6329 return expand_builtin_va_end (exp);
6330 case BUILT_IN_VA_COPY:
6331 return expand_builtin_va_copy (exp);
6332 case BUILT_IN_EXPECT:
6333 return expand_builtin_expect (exp, target);
6334 case BUILT_IN_ASSUME_ALIGNED:
6335 return expand_builtin_assume_aligned (exp, target);
6336 case BUILT_IN_PREFETCH:
6337 expand_builtin_prefetch (exp);
6338 return const0_rtx;
6339
6340 case BUILT_IN_INIT_TRAMPOLINE:
6341 return expand_builtin_init_trampoline (exp, true);
6342 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6343 return expand_builtin_init_trampoline (exp, false);
6344 case BUILT_IN_ADJUST_TRAMPOLINE:
6345 return expand_builtin_adjust_trampoline (exp);
6346
6347 case BUILT_IN_FORK:
6348 case BUILT_IN_EXECL:
6349 case BUILT_IN_EXECV:
6350 case BUILT_IN_EXECLP:
6351 case BUILT_IN_EXECLE:
6352 case BUILT_IN_EXECVP:
6353 case BUILT_IN_EXECVE:
6354 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6355 if (target)
6356 return target;
6357 break;
6358
6359 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6360 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6361 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6362 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6363 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6364 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6365 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6366 if (target)
6367 return target;
6368 break;
6369
6370 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6371 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6372 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6373 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6374 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6375 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6376 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6377 if (target)
6378 return target;
6379 break;
6380
6381 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6382 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6383 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6384 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6385 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6386 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6387 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6388 if (target)
6389 return target;
6390 break;
6391
6392 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6393 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6394 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6395 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6396 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6397 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6398 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6399 if (target)
6400 return target;
6401 break;
6402
6403 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6404 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6405 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6406 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6407 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6408 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6409 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6410 if (target)
6411 return target;
6412 break;
6413
6414 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6415 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6416 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6417 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6418 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6419 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6420 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6421 if (target)
6422 return target;
6423 break;
6424
6425 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6426 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6427 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6428 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6429 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6430 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6431 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6432 if (target)
6433 return target;
6434 break;
6435
6436 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6437 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6438 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6439 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6440 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6441 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6442 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6443 if (target)
6444 return target;
6445 break;
6446
6447 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6448 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6449 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6450 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6451 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6452 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6453 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6454 if (target)
6455 return target;
6456 break;
6457
6458 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6459 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6460 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6461 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6462 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6463 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6464 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6465 if (target)
6466 return target;
6467 break;
6468
6469 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6470 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6471 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6472 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6473 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6474 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6475 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6476 if (target)
6477 return target;
6478 break;
6479
6480 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6481 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6482 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6483 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6484 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6485 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6486 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6487 if (target)
6488 return target;
6489 break;
6490
6491 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6492 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6493 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6494 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6495 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6496 if (mode == VOIDmode)
6497 mode = TYPE_MODE (boolean_type_node);
6498 if (!target || !register_operand (target, mode))
6499 target = gen_reg_rtx (mode);
6500
6501 mode = get_builtin_sync_mode
6502 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6503 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6504 if (target)
6505 return target;
6506 break;
6507
6508 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6509 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6510 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6511 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6512 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6513 mode = get_builtin_sync_mode
6514 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6515 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6516 if (target)
6517 return target;
6518 break;
6519
6520 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6521 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6522 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6523 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6524 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6525 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6526 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6527 if (target)
6528 return target;
6529 break;
6530
6531 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6532 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6533 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6534 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6535 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6536 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6537 expand_builtin_sync_lock_release (mode, exp);
6538 return const0_rtx;
6539
6540 case BUILT_IN_SYNC_SYNCHRONIZE:
6541 expand_builtin_sync_synchronize ();
6542 return const0_rtx;
6543
6544 case BUILT_IN_ATOMIC_EXCHANGE_1:
6545 case BUILT_IN_ATOMIC_EXCHANGE_2:
6546 case BUILT_IN_ATOMIC_EXCHANGE_4:
6547 case BUILT_IN_ATOMIC_EXCHANGE_8:
6548 case BUILT_IN_ATOMIC_EXCHANGE_16:
6549 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6550 target = expand_builtin_atomic_exchange (mode, exp, target);
6551 if (target)
6552 return target;
6553 break;
6554
6555 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6556 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6557 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6558 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6559 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6560 {
6561 unsigned int nargs, z;
6562 vec<tree, va_gc> *vec;
6563
6564 mode =
6565 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6566 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6567 if (target)
6568 return target;
6569
6570 /* If this is turned into an external library call, the weak parameter
6571 must be dropped to match the expected parameter list. */
6572 nargs = call_expr_nargs (exp);
6573 vec_alloc (vec, nargs - 1);
6574 for (z = 0; z < 3; z++)
6575 vec->quick_push (CALL_EXPR_ARG (exp, z));
6576 /* Skip the boolean weak parameter. */
6577 for (z = 4; z < 6; z++)
6578 vec->quick_push (CALL_EXPR_ARG (exp, z));
6579 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6580 break;
6581 }
6582
6583 case BUILT_IN_ATOMIC_LOAD_1:
6584 case BUILT_IN_ATOMIC_LOAD_2:
6585 case BUILT_IN_ATOMIC_LOAD_4:
6586 case BUILT_IN_ATOMIC_LOAD_8:
6587 case BUILT_IN_ATOMIC_LOAD_16:
6588 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6589 target = expand_builtin_atomic_load (mode, exp, target);
6590 if (target)
6591 return target;
6592 break;
6593
6594 case BUILT_IN_ATOMIC_STORE_1:
6595 case BUILT_IN_ATOMIC_STORE_2:
6596 case BUILT_IN_ATOMIC_STORE_4:
6597 case BUILT_IN_ATOMIC_STORE_8:
6598 case BUILT_IN_ATOMIC_STORE_16:
6599 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6600 target = expand_builtin_atomic_store (mode, exp);
6601 if (target)
6602 return const0_rtx;
6603 break;
6604
6605 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6606 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6607 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6608 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6609 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6610 {
6611 enum built_in_function lib;
6612 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6613 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6614 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6615 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6616 ignore, lib);
6617 if (target)
6618 return target;
6619 break;
6620 }
6621 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6622 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6623 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6624 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6625 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6626 {
6627 enum built_in_function lib;
6628 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6629 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6630 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6631 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6632 ignore, lib);
6633 if (target)
6634 return target;
6635 break;
6636 }
6637 case BUILT_IN_ATOMIC_AND_FETCH_1:
6638 case BUILT_IN_ATOMIC_AND_FETCH_2:
6639 case BUILT_IN_ATOMIC_AND_FETCH_4:
6640 case BUILT_IN_ATOMIC_AND_FETCH_8:
6641 case BUILT_IN_ATOMIC_AND_FETCH_16:
6642 {
6643 enum built_in_function lib;
6644 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6645 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6646 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6647 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6648 ignore, lib);
6649 if (target)
6650 return target;
6651 break;
6652 }
6653 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6654 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6655 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6656 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6657 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6658 {
6659 enum built_in_function lib;
6660 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6661 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6662 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6663 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6664 ignore, lib);
6665 if (target)
6666 return target;
6667 break;
6668 }
6669 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6670 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6671 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6672 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6673 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6674 {
6675 enum built_in_function lib;
6676 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6677 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6678 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6679 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6680 ignore, lib);
6681 if (target)
6682 return target;
6683 break;
6684 }
6685 case BUILT_IN_ATOMIC_OR_FETCH_1:
6686 case BUILT_IN_ATOMIC_OR_FETCH_2:
6687 case BUILT_IN_ATOMIC_OR_FETCH_4:
6688 case BUILT_IN_ATOMIC_OR_FETCH_8:
6689 case BUILT_IN_ATOMIC_OR_FETCH_16:
6690 {
6691 enum built_in_function lib;
6692 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6693 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6694 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6695 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6696 ignore, lib);
6697 if (target)
6698 return target;
6699 break;
6700 }
6701 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6702 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6703 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6704 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6705 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6706 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6707 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6708 ignore, BUILT_IN_NONE);
6709 if (target)
6710 return target;
6711 break;
6712
6713 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6714 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6715 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6716 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6717 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6718 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6719 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6720 ignore, BUILT_IN_NONE);
6721 if (target)
6722 return target;
6723 break;
6724
6725 case BUILT_IN_ATOMIC_FETCH_AND_1:
6726 case BUILT_IN_ATOMIC_FETCH_AND_2:
6727 case BUILT_IN_ATOMIC_FETCH_AND_4:
6728 case BUILT_IN_ATOMIC_FETCH_AND_8:
6729 case BUILT_IN_ATOMIC_FETCH_AND_16:
6730 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6731 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6732 ignore, BUILT_IN_NONE);
6733 if (target)
6734 return target;
6735 break;
6736
6737 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6738 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6739 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6740 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6741 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6742 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6743 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6744 ignore, BUILT_IN_NONE);
6745 if (target)
6746 return target;
6747 break;
6748
6749 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6750 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6751 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6752 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6753 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6754 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6755 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6756 ignore, BUILT_IN_NONE);
6757 if (target)
6758 return target;
6759 break;
6760
6761 case BUILT_IN_ATOMIC_FETCH_OR_1:
6762 case BUILT_IN_ATOMIC_FETCH_OR_2:
6763 case BUILT_IN_ATOMIC_FETCH_OR_4:
6764 case BUILT_IN_ATOMIC_FETCH_OR_8:
6765 case BUILT_IN_ATOMIC_FETCH_OR_16:
6766 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6767 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6768 ignore, BUILT_IN_NONE);
6769 if (target)
6770 return target;
6771 break;
6772
6773 case BUILT_IN_ATOMIC_TEST_AND_SET:
6774 return expand_builtin_atomic_test_and_set (exp, target);
6775
6776 case BUILT_IN_ATOMIC_CLEAR:
6777 return expand_builtin_atomic_clear (exp);
6778
6779 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6780 return expand_builtin_atomic_always_lock_free (exp);
6781
6782 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6783 target = expand_builtin_atomic_is_lock_free (exp);
6784 if (target)
6785 return target;
6786 break;
6787
6788 case BUILT_IN_ATOMIC_THREAD_FENCE:
6789 expand_builtin_atomic_thread_fence (exp);
6790 return const0_rtx;
6791
6792 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6793 expand_builtin_atomic_signal_fence (exp);
6794 return const0_rtx;
6795
6796 case BUILT_IN_OBJECT_SIZE:
6797 return expand_builtin_object_size (exp);
6798
6799 case BUILT_IN_MEMCPY_CHK:
6800 case BUILT_IN_MEMPCPY_CHK:
6801 case BUILT_IN_MEMMOVE_CHK:
6802 case BUILT_IN_MEMSET_CHK:
6803 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6804 if (target)
6805 return target;
6806 break;
6807
6808 case BUILT_IN_STRCPY_CHK:
6809 case BUILT_IN_STPCPY_CHK:
6810 case BUILT_IN_STRNCPY_CHK:
6811 case BUILT_IN_STPNCPY_CHK:
6812 case BUILT_IN_STRCAT_CHK:
6813 case BUILT_IN_STRNCAT_CHK:
6814 case BUILT_IN_SNPRINTF_CHK:
6815 case BUILT_IN_VSNPRINTF_CHK:
6816 maybe_emit_chk_warning (exp, fcode);
6817 break;
6818
6819 case BUILT_IN_SPRINTF_CHK:
6820 case BUILT_IN_VSPRINTF_CHK:
6821 maybe_emit_sprintf_chk_warning (exp, fcode);
6822 break;
6823
6824 case BUILT_IN_FREE:
6825 if (warn_free_nonheap_object)
6826 maybe_emit_free_warning (exp);
6827 break;
6828
6829 case BUILT_IN_THREAD_POINTER:
6830 return expand_builtin_thread_pointer (exp, target);
6831
6832 case BUILT_IN_SET_THREAD_POINTER:
6833 expand_builtin_set_thread_pointer (exp);
6834 return const0_rtx;
6835
6836 case BUILT_IN_CILK_DETACH:
6837 expand_builtin_cilk_detach (exp);
6838 return const0_rtx;
6839
6840 case BUILT_IN_CILK_POP_FRAME:
6841 expand_builtin_cilk_pop_frame (exp);
6842 return const0_rtx;
6843
6844 default: /* Just do a library call if this is an unknown builtin. */
6845 break;
6846 }
6847
6848 /* The switch statement above can drop through to cause the function
6849 to be called normally. */
6850 return expand_call (exp, target, ignore);
6851 }
6852
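/* Editorial note (illustrative, not part of the original source):
   for the atomic cases above, a user-level call such as

       int old = __atomic_fetch_add (&counter, 1, __ATOMIC_SEQ_CST);

   expands to the target's atomic fetch-add pattern when one exists.
   If expansion fails (expand_builtin_atomic_fetch_op returns NULL),
   the break drops out of the switch and the expand_call below emits
   an ordinary call to the out-of-line routine (typically provided
   by libatomic).  */
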
6853 /* Determine whether a tree node represents a call to a built-in
6854 function. If the tree T is a call to a built-in function with
6855 the right number of arguments of the appropriate types, return
6856 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6857 Otherwise the return value is END_BUILTINS. */
6858
6859 enum built_in_function
6860 builtin_mathfn_code (const_tree t)
6861 {
6862 const_tree fndecl, arg, parmlist;
6863 const_tree argtype, parmtype;
6864 const_call_expr_arg_iterator iter;
6865
6866 if (TREE_CODE (t) != CALL_EXPR
6867 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6868 return END_BUILTINS;
6869
6870 fndecl = get_callee_fndecl (t);
6871 if (fndecl == NULL_TREE
6872 || TREE_CODE (fndecl) != FUNCTION_DECL
6873 || ! DECL_BUILT_IN (fndecl)
6874 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6875 return END_BUILTINS;
6876
6877 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6878 init_const_call_expr_arg_iterator (t, &iter);
6879 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6880 {
6881 /* If a function doesn't take a variable number of arguments,
6882 the last element in the list will have type `void'. */
6883 parmtype = TREE_VALUE (parmlist);
6884 if (VOID_TYPE_P (parmtype))
6885 {
6886 if (more_const_call_expr_args_p (&iter))
6887 return END_BUILTINS;
6888 return DECL_FUNCTION_CODE (fndecl);
6889 }
6890
6891 if (! more_const_call_expr_args_p (&iter))
6892 return END_BUILTINS;
6893
6894 arg = next_const_call_expr_arg (&iter);
6895 argtype = TREE_TYPE (arg);
6896
6897 if (SCALAR_FLOAT_TYPE_P (parmtype))
6898 {
6899 if (! SCALAR_FLOAT_TYPE_P (argtype))
6900 return END_BUILTINS;
6901 }
6902 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6903 {
6904 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6905 return END_BUILTINS;
6906 }
6907 else if (POINTER_TYPE_P (parmtype))
6908 {
6909 if (! POINTER_TYPE_P (argtype))
6910 return END_BUILTINS;
6911 }
6912 else if (INTEGRAL_TYPE_P (parmtype))
6913 {
6914 if (! INTEGRAL_TYPE_P (argtype))
6915 return END_BUILTINS;
6916 }
6917 else
6918 return END_BUILTINS;
6919 }
6920
6921 /* Variable-length argument list. */
6922 return DECL_FUNCTION_CODE (fndecl);
6923 }
6924
6925 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6926 evaluate to a constant. */
6927
6928 static tree
6929 fold_builtin_constant_p (tree arg)
6930 {
6931 /* We return 1 for a numeric type that's known to be a constant
6932 value at compile-time or for an aggregate type that's a
6933 literal constant. */
6934 STRIP_NOPS (arg);
6935
6936 /* If we know this is a constant, return the constant one. */
6937 if (CONSTANT_CLASS_P (arg)
6938 || (TREE_CODE (arg) == CONSTRUCTOR
6939 && TREE_CONSTANT (arg)))
6940 return integer_one_node;
6941 if (TREE_CODE (arg) == ADDR_EXPR)
6942 {
6943 tree op = TREE_OPERAND (arg, 0);
6944 if (TREE_CODE (op) == STRING_CST
6945 || (TREE_CODE (op) == ARRAY_REF
6946 && integer_zerop (TREE_OPERAND (op, 1))
6947 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6948 return integer_one_node;
6949 }
6950
6951 /* If this expression has side effects, show we don't know it to be a
6952 constant. Likewise if it's a pointer or aggregate type, since in
6953 those cases we only want literals; those are only optimized
6954 when generating RTL, not later.
6955 And finally, if we are compiling an initializer, not code, we
6956 need to return a definite result now; there's not going to be any
6957 more optimization done. */
6958 if (TREE_SIDE_EFFECTS (arg)
6959 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
6960 || POINTER_TYPE_P (TREE_TYPE (arg))
6961 || cfun == 0
6962 || folding_initializer
6963 || force_folding_builtin_constant_p)
6964 return integer_zero_node;
6965
6966 return NULL_TREE;
6967 }
6968
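/* Editorial sketch (illustrative, not part of the original source;
   the function name is hypothetical): user-level behavior of the
   fold above.  */
#if 0
int
example_constant_p (int x)
{
  int a = __builtin_constant_p (3 * 4); /* folds to 1: constant */
  int b = __builtin_constant_p (x++);   /* folds to 0: side effects */
  int c = __builtin_constant_p (x);     /* NULL_TREE: folding is
                                           deferred to later passes */
  return a + b + c;
}
#endif
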
6969 /* Create builtin_expect with PRED and EXPECTED as its arguments and
6970 return it as a truthvalue. */
6971
6972 static tree
6973 build_builtin_expect_predicate (location_t loc, tree pred, tree expected)
6974 {
6975 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
6976
6977 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
6978 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
6979 ret_type = TREE_TYPE (TREE_TYPE (fn));
6980 pred_type = TREE_VALUE (arg_types);
6981 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
6982
6983 pred = fold_convert_loc (loc, pred_type, pred);
6984 expected = fold_convert_loc (loc, expected_type, expected);
6985 call_expr = build_call_expr_loc (loc, fn, 2, pred, expected);
6986
6987 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
6988 build_int_cst (ret_type, 0));
6989 }
6990
6991 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
6992 NULL_TREE if no simplification is possible. */
6993
6994 static tree
6995 fold_builtin_expect (location_t loc, tree arg0, tree arg1)
6996 {
6997 tree inner, fndecl, inner_arg0;
6998 enum tree_code code;
6999
7000 /* Distribute the expected value over short-circuiting operators.
7001 See through the cast from truthvalue_type_node to long. */
7002 inner_arg0 = arg0;
7003 while (TREE_CODE (inner_arg0) == NOP_EXPR
7004 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7005 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7006 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7007
7008 /* If this is a builtin_expect within a builtin_expect, keep the
7009 inner one. See through a comparison against a constant. It
7010 might have been added to create a truthvalue. */
7011 inner = inner_arg0;
7012
7013 if (COMPARISON_CLASS_P (inner)
7014 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7015 inner = TREE_OPERAND (inner, 0);
7016
7017 if (TREE_CODE (inner) == CALL_EXPR
7018 && (fndecl = get_callee_fndecl (inner))
7019 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7020 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7021 return arg0;
7022
7023 inner = inner_arg0;
7024 code = TREE_CODE (inner);
7025 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7026 {
7027 tree op0 = TREE_OPERAND (inner, 0);
7028 tree op1 = TREE_OPERAND (inner, 1);
7029
7030 op0 = build_builtin_expect_predicate (loc, op0, arg1);
7031 op1 = build_builtin_expect_predicate (loc, op1, arg1);
7032 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7033
7034 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7035 }
7036
7037 /* If the argument isn't invariant then there's nothing else we can do. */
7038 if (!TREE_CONSTANT (inner_arg0))
7039 return NULL_TREE;
7040
7041 /* If we expect that a comparison against the argument will fold to
7042 a constant, return the constant. In practice, this means a true
7043 constant or the address of a non-weak symbol. */
7044 inner = inner_arg0;
7045 STRIP_NOPS (inner);
7046 if (TREE_CODE (inner) == ADDR_EXPR)
7047 {
7048 do
7049 {
7050 inner = TREE_OPERAND (inner, 0);
7051 }
7052 while (TREE_CODE (inner) == COMPONENT_REF
7053 || TREE_CODE (inner) == ARRAY_REF);
7054 if ((TREE_CODE (inner) == VAR_DECL
7055 || TREE_CODE (inner) == FUNCTION_DECL)
7056 && DECL_WEAK (inner))
7057 return NULL_TREE;
7058 }
7059
7060 /* Otherwise, ARG0 already has the proper type for the return value. */
7061 return arg0;
7062 }
7063
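/* Editorial note (illustrative, not part of the original source):
   with the fold above, a condition such as

       if (__builtin_expect (a > 0 && b > 0, 1))

   is rewritten along the lines of

       if ((__builtin_expect (a > 0, 1) != 0)
           && (__builtin_expect (b > 0, 1) != 0))

   so both arms of the short-circuit operator carry the hint.  */
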
7064 /* Fold a call to __builtin_classify_type with argument ARG. */
7065
7066 static tree
7067 fold_builtin_classify_type (tree arg)
7068 {
7069 if (arg == 0)
7070 return build_int_cst (integer_type_node, no_type_class);
7071
7072 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7073 }
7074
7075 /* Fold a call to __builtin_strlen with argument ARG. */
7076
7077 static tree
7078 fold_builtin_strlen (location_t loc, tree type, tree arg)
7079 {
7080 if (!validate_arg (arg, POINTER_TYPE))
7081 return NULL_TREE;
7082 else
7083 {
7084 tree len = c_strlen (arg, 0);
7085
7086 if (len)
7087 return fold_convert_loc (loc, type, len);
7088
7089 return NULL_TREE;
7090 }
7091 }
7092
7093 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7094
7095 static tree
7096 fold_builtin_inf (location_t loc, tree type, int warn)
7097 {
7098 REAL_VALUE_TYPE real;
7099
7100 /* __builtin_inff is intended to be usable to define INFINITY on all
7101 targets. If an infinity is not available, INFINITY expands "to a
7102 positive constant of type float that overflows at translation
7103 time", footnote "In this case, using INFINITY will violate the
7104 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7105 Thus we pedwarn to ensure this constraint violation is
7106 diagnosed. */
7107 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7108 pedwarn (loc, 0, "target format does not support infinity");
7109
7110 real_inf (&real);
7111 return build_real (type, real);
7112 }
7113
7114 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7115
7116 static tree
7117 fold_builtin_nan (tree arg, tree type, int quiet)
7118 {
7119 REAL_VALUE_TYPE real;
7120 const char *str;
7121
7122 if (!validate_arg (arg, POINTER_TYPE))
7123 return NULL_TREE;
7124 str = c_getstr (arg);
7125 if (!str)
7126 return NULL_TREE;
7127
7128 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7129 return NULL_TREE;
7130
7131 return build_real (type, real);
7132 }
7133
7134 /* Return true if the floating point expression T has an integer value.
7135 We also allow +Inf, -Inf and NaN to be considered integer values. */
7136
7137 static bool
7138 integer_valued_real_p (tree t)
7139 {
7140 switch (TREE_CODE (t))
7141 {
7142 case FLOAT_EXPR:
7143 return true;
7144
7145 case ABS_EXPR:
7146 case SAVE_EXPR:
7147 return integer_valued_real_p (TREE_OPERAND (t, 0));
7148
7149 case COMPOUND_EXPR:
7150 case MODIFY_EXPR:
7151 case BIND_EXPR:
7152 return integer_valued_real_p (TREE_OPERAND (t, 1));
7153
7154 case PLUS_EXPR:
7155 case MINUS_EXPR:
7156 case MULT_EXPR:
7157 case MIN_EXPR:
7158 case MAX_EXPR:
7159 return integer_valued_real_p (TREE_OPERAND (t, 0))
7160 && integer_valued_real_p (TREE_OPERAND (t, 1));
7161
7162 case COND_EXPR:
7163 return integer_valued_real_p (TREE_OPERAND (t, 1))
7164 && integer_valued_real_p (TREE_OPERAND (t, 2));
7165
7166 case REAL_CST:
7167 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7168
7169 case NOP_EXPR:
7170 {
7171 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7172 if (TREE_CODE (type) == INTEGER_TYPE)
7173 return true;
7174 if (TREE_CODE (type) == REAL_TYPE)
7175 return integer_valued_real_p (TREE_OPERAND (t, 0));
7176 break;
7177 }
7178
7179 case CALL_EXPR:
7180 switch (builtin_mathfn_code (t))
7181 {
7182 CASE_FLT_FN (BUILT_IN_CEIL):
7183 CASE_FLT_FN (BUILT_IN_FLOOR):
7184 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7185 CASE_FLT_FN (BUILT_IN_RINT):
7186 CASE_FLT_FN (BUILT_IN_ROUND):
7187 CASE_FLT_FN (BUILT_IN_TRUNC):
7188 return true;
7189
7190 CASE_FLT_FN (BUILT_IN_FMIN):
7191 CASE_FLT_FN (BUILT_IN_FMAX):
7192 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7193 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7194
7195 default:
7196 break;
7197 }
7198 break;
7199
7200 default:
7201 break;
7202 }
7203 return false;
7204 }
7205
7206 /* FNDECL is assumed to be a builtin where truncation can be propagated
7207 across (for instance floor((double)f) == (double)floorf (f)).
7208 Do the transformation for a call with argument ARG. */
7209
7210 static tree
7211 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
7212 {
7213 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7214
7215 if (!validate_arg (arg, REAL_TYPE))
7216 return NULL_TREE;
7217
7218 /* Integer rounding functions are idempotent. */
7219 if (fcode == builtin_mathfn_code (arg))
7220 return arg;
7221
7222 /* If argument is already integer valued, and we don't need to worry
7223 about setting errno, there's no need to perform rounding. */
7224 if (! flag_errno_math && integer_valued_real_p (arg))
7225 return arg;
7226
7227 if (optimize)
7228 {
7229 tree arg0 = strip_float_extensions (arg);
7230 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7231 tree newtype = TREE_TYPE (arg0);
7232 tree decl;
7233
7234 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7235 && (decl = mathfn_built_in (newtype, fcode)))
7236 return fold_convert_loc (loc, ftype,
7237 build_call_expr_loc (loc, decl, 1,
7238 fold_convert_loc (loc,
7239 newtype,
7240 arg0)));
7241 }
7242 return NULL_TREE;
7243 }
7244
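/* Editorial sketch (illustrative, not part of the original source;
   the function name is hypothetical): effects of the fold above
   when optimization is enabled.  */
#if 0
double
example_trunc_transparent (float f, double x)
{
  double a = floor ((double) f); /* narrowed: (double) floorf (f) */
  double b = floor (floor (x));  /* idempotent: floor (x) */
  return a + b;
}
#endif
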
7245 /* FNDECL is assumed to be a builtin which can narrow the FP type of
7246 the argument, for instance lround((double)f) -> lroundf (f).
7247 Do the transformation for a call with argument ARG. */
7248
7249 static tree
7250 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
7251 {
7252 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7253
7254 if (!validate_arg (arg, REAL_TYPE))
7255 return NULL_TREE;
7256
7257 /* If argument is already integer valued, and we don't need to worry
7258 about setting errno, there's no need to perform rounding. */
7259 if (! flag_errno_math && integer_valued_real_p (arg))
7260 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7261 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7262
7263 if (optimize)
7264 {
7265 tree ftype = TREE_TYPE (arg);
7266 tree arg0 = strip_float_extensions (arg);
7267 tree newtype = TREE_TYPE (arg0);
7268 tree decl;
7269
7270 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7271 && (decl = mathfn_built_in (newtype, fcode)))
7272 return build_call_expr_loc (loc, decl, 1,
7273 fold_convert_loc (loc, newtype, arg0));
7274 }
7275
7276 /* Canonicalize iround (x) to lround (x) on ILP32 targets where
7277 sizeof (int) == sizeof (long). */
7278 if (TYPE_PRECISION (integer_type_node)
7279 == TYPE_PRECISION (long_integer_type_node))
7280 {
7281 tree newfn = NULL_TREE;
7282 switch (fcode)
7283 {
7284 CASE_FLT_FN (BUILT_IN_ICEIL):
7285 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7286 break;
7287
7288 CASE_FLT_FN (BUILT_IN_IFLOOR):
7289 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7290 break;
7291
7292 CASE_FLT_FN (BUILT_IN_IROUND):
7293 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7294 break;
7295
7296 CASE_FLT_FN (BUILT_IN_IRINT):
7297 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7298 break;
7299
7300 default:
7301 break;
7302 }
7303
7304 if (newfn)
7305 {
7306 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7307 return fold_convert_loc (loc,
7308 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7309 }
7310 }
7311
7312 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7313 sizeof (long long) == sizeof (long). */
7314 if (TYPE_PRECISION (long_long_integer_type_node)
7315 == TYPE_PRECISION (long_integer_type_node))
7316 {
7317 tree newfn = NULL_TREE;
7318 switch (fcode)
7319 {
7320 CASE_FLT_FN (BUILT_IN_LLCEIL):
7321 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7322 break;
7323
7324 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7325 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7326 break;
7327
7328 CASE_FLT_FN (BUILT_IN_LLROUND):
7329 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7330 break;
7331
7332 CASE_FLT_FN (BUILT_IN_LLRINT):
7333 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7334 break;
7335
7336 default:
7337 break;
7338 }
7339
7340 if (newfn)
7341 {
7342 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7343 return fold_convert_loc (loc,
7344 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7345 }
7346 }
7347
7348 return NULL_TREE;
7349 }
7350
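/* Editorial sketch (illustrative, not part of the original source;
   the function name is hypothetical): narrowing and canonicalization
   done above, assuming an LP64 target where long and long long have
   the same precision.  */
#if 0
long
example_fixed (float f, double x)
{
  long a = lround ((double) f); /* narrowed: lroundf (f) */
  long b = (long) llround (x);  /* canonicalized: lround (x) */
  return a + b;
}
#endif
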
7351 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7352 return type. Return NULL_TREE if no simplification can be made. */
7353
7354 static tree
7355 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
7356 {
7357 tree res;
7358
7359 if (!validate_arg (arg, COMPLEX_TYPE)
7360 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7361 return NULL_TREE;
7362
7363 /* Calculate the result when the argument is a constant. */
7364 if (TREE_CODE (arg) == COMPLEX_CST
7365 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7366 type, mpfr_hypot)))
7367 return res;
7368
7369 if (TREE_CODE (arg) == COMPLEX_EXPR)
7370 {
7371 tree real = TREE_OPERAND (arg, 0);
7372 tree imag = TREE_OPERAND (arg, 1);
7373
7374 /* If either part is zero, cabs is fabs of the other. */
7375 if (real_zerop (real))
7376 return fold_build1_loc (loc, ABS_EXPR, type, imag);
7377 if (real_zerop (imag))
7378 return fold_build1_loc (loc, ABS_EXPR, type, real);
7379
7380 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7381 if (flag_unsafe_math_optimizations
7382 && operand_equal_p (real, imag, OEP_PURE_SAME))
7383 {
7384 const REAL_VALUE_TYPE sqrt2_trunc
7385 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7386 STRIP_NOPS (real);
7387 return fold_build2_loc (loc, MULT_EXPR, type,
7388 fold_build1_loc (loc, ABS_EXPR, type, real),
7389 build_real (type, sqrt2_trunc));
7390 }
7391 }
7392
7393 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7394 if (TREE_CODE (arg) == NEGATE_EXPR
7395 || TREE_CODE (arg) == CONJ_EXPR)
7396 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
7397
7398 /* Don't do this when optimizing for size. */
7399 if (flag_unsafe_math_optimizations
7400 && optimize && optimize_function_for_speed_p (cfun))
7401 {
7402 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7403
7404 if (sqrtfn != NULL_TREE)
7405 {
7406 tree rpart, ipart, result;
7407
7408 arg = builtin_save_expr (arg);
7409
7410 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
7411 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
7412
7413 rpart = builtin_save_expr (rpart);
7414 ipart = builtin_save_expr (ipart);
7415
7416 result = fold_build2_loc (loc, PLUS_EXPR, type,
7417 fold_build2_loc (loc, MULT_EXPR, type,
7418 rpart, rpart),
7419 fold_build2_loc (loc, MULT_EXPR, type,
7420 ipart, ipart));
7421
7422 return build_call_expr_loc (loc, sqrtfn, 1, result);
7423 }
7424 }
7425
7426 return NULL_TREE;
7427 }
7428
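/* Editorial sketch (illustrative, not part of the original source;
   the function name is hypothetical): source-level effect of the
   cabs folds above, assuming the front end keeps the COMPLEX_EXPR
   tree shape visible.  */
#if 0
double
example_cabs (double x, double _Complex z)
{
  double a = cabs (__builtin_complex (x, 0.0)); /* -> fabs (x) */
  double b = cabs (-z);                         /* -> cabs (z) */
  /* With -funsafe-math-optimizations, when optimizing for speed,
     cabs (z) itself becomes sqrt (r*r + i*i) on the saved real
     and imaginary parts.  */
  return a + b + cabs (z);
}
#endif
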
7429 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7430 complex tree type of the result. If NEG is true, the imaginary
7431 zero is negative. */
7432
7433 static tree
7434 build_complex_cproj (tree type, bool neg)
7435 {
7436 REAL_VALUE_TYPE rinf, rzero = dconst0;
7437
7438 real_inf (&rinf);
7439 rzero.sign = neg;
7440 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7441 build_real (TREE_TYPE (type), rzero));
7442 }
7443
7444 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7445 return type. Return NULL_TREE if no simplification can be made. */
7446
7447 static tree
7448 fold_builtin_cproj (location_t loc, tree arg, tree type)
7449 {
7450 if (!validate_arg (arg, COMPLEX_TYPE)
7451 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7452 return NULL_TREE;
7453
7454 /* If there are no infinities, return arg. */
7455 if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type))))
7456 return non_lvalue_loc (loc, arg);
7457
7458 /* Calculate the result when the argument is a constant. */
7459 if (TREE_CODE (arg) == COMPLEX_CST)
7460 {
7461 const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
7462 const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
7463
7464 if (real_isinf (real) || real_isinf (imag))
7465 return build_complex_cproj (type, imag->sign);
7466 else
7467 return arg;
7468 }
7469 else if (TREE_CODE (arg) == COMPLEX_EXPR)
7470 {
7471 tree real = TREE_OPERAND (arg, 0);
7472 tree imag = TREE_OPERAND (arg, 1);
7473
7474 STRIP_NOPS (real);
7475 STRIP_NOPS (imag);
7476
7477 /* If the real part is inf and the imag part is known to be
7478 nonnegative, return (inf + 0i). Remember side-effects are
7479 possible in the imag part. */
7480 if (TREE_CODE (real) == REAL_CST
7481 && real_isinf (TREE_REAL_CST_PTR (real))
7482 && tree_expr_nonnegative_p (imag))
7483 return omit_one_operand_loc (loc, type,
7484 build_complex_cproj (type, false),
7485 arg);
7486
7487 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
7488 Remember side-effects are possible in the real part. */
7489 if (TREE_CODE (imag) == REAL_CST
7490 && real_isinf (TREE_REAL_CST_PTR (imag)))
7491 return
7492 omit_one_operand_loc (loc, type,
7493 build_complex_cproj (type, TREE_REAL_CST_PTR
7494 (imag)->sign), arg);
7495 }
7496
7497 return NULL_TREE;
7498 }
7499
7500 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7501 Return NULL_TREE if no simplification can be made. */
7502
7503 static tree
7504 fold_builtin_sqrt (location_t loc, tree arg, tree type)
7505 {
7506
7507 enum built_in_function fcode;
7508 tree res;
7509
7510 if (!validate_arg (arg, REAL_TYPE))
7511 return NULL_TREE;
7512
7513 /* Calculate the result when the argument is a constant. */
7514 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7515 return res;
7516
7517 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7518 fcode = builtin_mathfn_code (arg);
7519 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7520 {
7521 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7522 arg = fold_build2_loc (loc, MULT_EXPR, type,
7523 CALL_EXPR_ARG (arg, 0),
7524 build_real (type, dconsthalf));
7525 return build_call_expr_loc (loc, expfn, 1, arg);
7526 }
7527
7528 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7529 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7530 {
7531 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7532
7533 if (powfn)
7534 {
7535 tree arg0 = CALL_EXPR_ARG (arg, 0);
7536 tree tree_root;
7537 /* The inner root was either sqrt or cbrt. */
7538 /* This was a conditional expression but it triggered a bug
7539 in Sun C 5.5. */
7540 REAL_VALUE_TYPE dconstroot;
7541 if (BUILTIN_SQRT_P (fcode))
7542 dconstroot = dconsthalf;
7543 else
7544 dconstroot = dconst_third ();
7545
7546 /* Adjust for the outer root. */
7547 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7548 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7549 tree_root = build_real (type, dconstroot);
7550 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7551 }
7552 }
7553
7554 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7555 if (flag_unsafe_math_optimizations
7556 && (fcode == BUILT_IN_POW
7557 || fcode == BUILT_IN_POWF
7558 || fcode == BUILT_IN_POWL))
7559 {
7560 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7561 tree arg0 = CALL_EXPR_ARG (arg, 0);
7562 tree arg1 = CALL_EXPR_ARG (arg, 1);
7563 tree narg1;
7564 if (!tree_expr_nonnegative_p (arg0))
7565 arg0 = build1 (ABS_EXPR, type, arg0);
7566 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7567 build_real (type, dconsthalf));
7568 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7569 }
7570
7571 return NULL_TREE;
7572 }
7573
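/* Editorial sketch (illustrative, not part of the original source;
   the function name is hypothetical): rewrites done above, all of
   which require -funsafe-math-optimizations.  */
#if 0
double
example_sqrt (double x, double y)
{
  double a = sqrt (exp (x));    /* -> exp (x * 0.5) */
  double b = sqrt (sqrt (x));   /* -> pow (x, 0.25) */
  double c = sqrt (pow (x, y)); /* -> pow (fabs (x), y * 0.5) */
  return a + b + c;
}
#endif
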
7574 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7575 Return NULL_TREE if no simplification can be made. */
7576
7577 static tree
7578 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7579 {
7580 const enum built_in_function fcode = builtin_mathfn_code (arg);
7581 tree res;
7582
7583 if (!validate_arg (arg, REAL_TYPE))
7584 return NULL_TREE;
7585
7586 /* Calculate the result when the argument is a constant. */
7587 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7588 return res;
7589
7590 if (flag_unsafe_math_optimizations)
7591 {
7592 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7593 if (BUILTIN_EXPONENT_P (fcode))
7594 {
7595 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7596 const REAL_VALUE_TYPE third_trunc =
7597 real_value_truncate (TYPE_MODE (type), dconst_third ());
7598 arg = fold_build2_loc (loc, MULT_EXPR, type,
7599 CALL_EXPR_ARG (arg, 0),
7600 build_real (type, third_trunc));
7601 return build_call_expr_loc (loc, expfn, 1, arg);
7602 }
7603
7604 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7605 if (BUILTIN_SQRT_P (fcode))
7606 {
7607 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7608
7609 if (powfn)
7610 {
7611 tree arg0 = CALL_EXPR_ARG (arg, 0);
7612 tree tree_root;
7613 REAL_VALUE_TYPE dconstroot = dconst_third ();
7614
7615 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7616 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7617 tree_root = build_real (type, dconstroot);
7618 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7619 }
7620 }
7621
7622 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7623 if (BUILTIN_CBRT_P (fcode))
7624 {
7625 tree arg0 = CALL_EXPR_ARG (arg, 0);
7626 if (tree_expr_nonnegative_p (arg0))
7627 {
7628 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7629
7630 if (powfn)
7631 {
7632 tree tree_root;
7633 REAL_VALUE_TYPE dconstroot;
7634
7635 real_arithmetic (&dconstroot, MULT_EXPR,
7636 dconst_third_ptr (), dconst_third_ptr ());
7637 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7638 tree_root = build_real (type, dconstroot);
7639 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7640 }
7641 }
7642 }
7643
7644 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7645 if (fcode == BUILT_IN_POW
7646 || fcode == BUILT_IN_POWF
7647 || fcode == BUILT_IN_POWL)
7648 {
7649 tree arg00 = CALL_EXPR_ARG (arg, 0);
7650 tree arg01 = CALL_EXPR_ARG (arg, 1);
7651 if (tree_expr_nonnegative_p (arg00))
7652 {
7653 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7654 const REAL_VALUE_TYPE dconstroot
7655 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7656 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7657 build_real (type, dconstroot));
7658 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7659 }
7660 }
7661 }
7662 return NULL_TREE;
7663 }
7664
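/* Editorial sketch (illustrative, not part of the original source;
   the function name is hypothetical): rewrites done above under
   -funsafe-math-optimizations.  */
#if 0
double
example_cbrt (double x, double y)
{
  double a = cbrt (exp (x));  /* -> exp (x / 3) */
  double b = cbrt (sqrt (x)); /* -> pow (x, 1.0/6) */
  /* cbrt (cbrt (x)) -> pow (x, 1.0/9) and cbrt (pow (x, y))
     -> pow (x, y/3) apply only when x is known nonnegative.  */
  return a + b;
}
#endif
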
7665 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7666 TYPE is the type of the return value. Return NULL_TREE if no
7667 simplification can be made. */
7668
7669 static tree
7670 fold_builtin_cos (location_t loc,
7671 tree arg, tree type, tree fndecl)
7672 {
7673 tree res, narg;
7674
7675 if (!validate_arg (arg, REAL_TYPE))
7676 return NULL_TREE;
7677
7678 /* Calculate the result when the argument is a constant. */
7679 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7680 return res;
7681
7682 /* Optimize cos(-x) into cos (x). */
7683 if ((narg = fold_strip_sign_ops (arg)))
7684 return build_call_expr_loc (loc, fndecl, 1, narg);
7685
7686 return NULL_TREE;
7687 }
7688
7689 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7690 Return NULL_TREE if no simplification can be made. */
7691
7692 static tree
7693 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7694 {
7695 if (validate_arg (arg, REAL_TYPE))
7696 {
7697 tree res, narg;
7698
7699 /* Calculate the result when the argument is a constant. */
7700 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7701 return res;
7702
7703 /* Optimize cosh(-x) into cosh (x). */
7704 if ((narg = fold_strip_sign_ops (arg)))
7705 return build_call_expr_loc (loc, fndecl, 1, narg);
7706 }
7707
7708 return NULL_TREE;
7709 }
7710
7711 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7712 argument ARG. TYPE is the type of the return value. Return
7713 NULL_TREE if no simplification can be made. */
7714
7715 static tree
7716 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7717 bool hyper)
7718 {
7719 if (validate_arg (arg, COMPLEX_TYPE)
7720 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7721 {
7722 tree tmp;
7723
7724 /* Calculate the result when the argument is a constant. */
7725 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7726 return tmp;
7727
7728 /* Optimize fn(-x) into fn(x). */
7729 if ((tmp = fold_strip_sign_ops (arg)))
7730 return build_call_expr_loc (loc, fndecl, 1, tmp);
7731 }
7732
7733 return NULL_TREE;
7734 }
7735
7736 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7737 Return NULL_TREE if no simplification can be made. */
7738
7739 static tree
7740 fold_builtin_tan (tree arg, tree type)
7741 {
7742 enum built_in_function fcode;
7743 tree res;
7744
7745 if (!validate_arg (arg, REAL_TYPE))
7746 return NULL_TREE;
7747
7748 /* Calculate the result when the argument is a constant. */
7749 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7750 return res;
7751
7752 /* Optimize tan(atan(x)) = x. */
7753 fcode = builtin_mathfn_code (arg);
7754 if (flag_unsafe_math_optimizations
7755 && (fcode == BUILT_IN_ATAN
7756 || fcode == BUILT_IN_ATANF
7757 || fcode == BUILT_IN_ATANL))
7758 return CALL_EXPR_ARG (arg, 0);
7759
7760 return NULL_TREE;
7761 }
7762
7763 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7764 NULL_TREE if no simplification can be made. */
7765
7766 static tree
7767 fold_builtin_sincos (location_t loc,
7768 tree arg0, tree arg1, tree arg2)
7769 {
7770 tree type;
7771 tree res, fn, call;
7772
7773 if (!validate_arg (arg0, REAL_TYPE)
7774 || !validate_arg (arg1, POINTER_TYPE)
7775 || !validate_arg (arg2, POINTER_TYPE))
7776 return NULL_TREE;
7777
7778 type = TREE_TYPE (arg0);
7779
7780 /* Calculate the result when the argument is a constant. */
7781 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7782 return res;
7783
7784 /* Canonicalize sincos to cexpi. */
7785 if (!targetm.libc_has_function (function_c99_math_complex))
7786 return NULL_TREE;
7787 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7788 if (!fn)
7789 return NULL_TREE;
7790
7791 call = build_call_expr_loc (loc, fn, 1, arg0);
7792 call = builtin_save_expr (call);
7793
7794 return build2 (COMPOUND_EXPR, void_type_node,
7795 build2 (MODIFY_EXPR, void_type_node,
7796 build_fold_indirect_ref_loc (loc, arg1),
7797 build1 (IMAGPART_EXPR, type, call)),
7798 build2 (MODIFY_EXPR, void_type_node,
7799 build_fold_indirect_ref_loc (loc, arg2),
7800 build1 (REALPART_EXPR, type, call)));
7801 }
7802
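/* Editorial note (illustrative, not part of the original source):
   when the C99 complex runtime is available, the fold above turns

       sincos (x, &s, &c);

   into roughly

       _Complex double t = __builtin_cexpi (x);
       s = __imag__ t;  c = __real__ t;

   so the combined sin/cos computation can be shared.  */
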
7803 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7804 NULL_TREE if no simplification can be made. */
7805
7806 static tree
7807 fold_builtin_cexp (location_t loc, tree arg0, tree type)
7808 {
7809 tree rtype;
7810 tree realp, imagp, ifn;
7811 tree res;
7812
7813 if (!validate_arg (arg0, COMPLEX_TYPE)
7814 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
7815 return NULL_TREE;
7816
7817 /* Calculate the result when the argument is a constant. */
7818 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
7819 return res;
7820
7821 rtype = TREE_TYPE (TREE_TYPE (arg0));
7822
7823 /* In case we can figure out the real part of arg0 and it is constant zero,
7824 fold to cexpi. */
7825 if (!targetm.libc_has_function (function_c99_math_complex))
7826 return NULL_TREE;
7827 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7828 if (!ifn)
7829 return NULL_TREE;
7830
7831 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
7832 && real_zerop (realp))
7833 {
7834 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
7835 return build_call_expr_loc (loc, ifn, 1, narg);
7836 }
7837
7838 /* In case we can easily decompose real and imaginary parts, split cexp
7839 into exp (r) * cexpi (i). */
7840 if (flag_unsafe_math_optimizations
7841 && realp)
7842 {
7843 tree rfn, rcall, icall;
7844
7845 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7846 if (!rfn)
7847 return NULL_TREE;
7848
7849 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
7850 if (!imagp)
7851 return NULL_TREE;
7852
7853 icall = build_call_expr_loc (loc, ifn, 1, imagp);
7854 icall = builtin_save_expr (icall);
7855 rcall = build_call_expr_loc (loc, rfn, 1, realp);
7856 rcall = builtin_save_expr (rcall);
7857 return fold_build2_loc (loc, COMPLEX_EXPR, type,
7858 fold_build2_loc (loc, MULT_EXPR, rtype,
7859 rcall,
7860 fold_build1_loc (loc, REALPART_EXPR,
7861 rtype, icall)),
7862 fold_build2_loc (loc, MULT_EXPR, rtype,
7863 rcall,
7864 fold_build1_loc (loc, IMAGPART_EXPR,
7865 rtype, icall)));
7866 }
7867
7868 return NULL_TREE;
7869 }
7870
7871 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7872 Return NULL_TREE if no simplification can be made. */
7873
7874 static tree
7875 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7876 {
7877 if (!validate_arg (arg, REAL_TYPE))
7878 return NULL_TREE;
7879
7880 /* Optimize trunc of constant value. */
7881 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7882 {
7883 REAL_VALUE_TYPE r, x;
7884 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7885
7886 x = TREE_REAL_CST (arg);
7887 real_trunc (&r, TYPE_MODE (type), &x);
7888 return build_real (type, r);
7889 }
7890
7891 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7892 }
7893
7894 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7895 Return NULL_TREE if no simplification can be made. */
7896
7897 static tree
7898 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7899 {
7900 if (!validate_arg (arg, REAL_TYPE))
7901 return NULL_TREE;
7902
7903 /* Optimize floor of constant value. */
7904 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7905 {
7906 REAL_VALUE_TYPE x;
7907
7908 x = TREE_REAL_CST (arg);
7909 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7910 {
7911 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7912 REAL_VALUE_TYPE r;
7913
7914 real_floor (&r, TYPE_MODE (type), &x);
7915 return build_real (type, r);
7916 }
7917 }
7918
7919 /* Fold floor (x) where x is nonnegative to trunc (x). */
7920 if (tree_expr_nonnegative_p (arg))
7921 {
7922 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7923 if (truncfn)
7924 return build_call_expr_loc (loc, truncfn, 1, arg);
7925 }
7926
7927 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7928 }
7929
7930 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7931 Return NULL_TREE if no simplification can be made. */
7932
7933 static tree
7934 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7935 {
7936 if (!validate_arg (arg, REAL_TYPE))
7937 return NULL_TREE;
7938
7939 /* Optimize ceil of constant value. */
7940 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7941 {
7942 REAL_VALUE_TYPE x;
7943
7944 x = TREE_REAL_CST (arg);
7945 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7946 {
7947 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7948 REAL_VALUE_TYPE r;
7949
7950 real_ceil (&r, TYPE_MODE (type), &x);
7951 return build_real (type, r);
7952 }
7953 }
7954
7955 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7956 }
7957
7958 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7959 Return NULL_TREE if no simplification can be made. */
7960
7961 static tree
7962 fold_builtin_round (location_t loc, tree fndecl, tree arg)
7963 {
7964 if (!validate_arg (arg, REAL_TYPE))
7965 return NULL_TREE;
7966
7967 /* Optimize round of constant value. */
7968 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7969 {
7970 REAL_VALUE_TYPE x;
7971
7972 x = TREE_REAL_CST (arg);
7973 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7974 {
7975 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7976 REAL_VALUE_TYPE r;
7977
7978 real_round (&r, TYPE_MODE (type), &x);
7979 return build_real (type, r);
7980 }
7981 }
7982
7983 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7984 }
7985
7986 /* Fold function call to builtin lround, lroundf or lroundl (or the
7987 corresponding long long versions) and other rounding functions. ARG
7988 is the argument to the call. Return NULL_TREE if no simplification
7989 can be made. */
7990
7991 static tree
7992 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
7993 {
7994 if (!validate_arg (arg, REAL_TYPE))
7995 return NULL_TREE;
7996
7997 /* Optimize lround of constant value. */
7998 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7999 {
8000 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
8001
8002 if (real_isfinite (&x))
8003 {
8004 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
8005 tree ftype = TREE_TYPE (arg);
8006 double_int val;
8007 REAL_VALUE_TYPE r;
8008
8009 switch (DECL_FUNCTION_CODE (fndecl))
8010 {
8011 CASE_FLT_FN (BUILT_IN_IFLOOR):
8012 CASE_FLT_FN (BUILT_IN_LFLOOR):
8013 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8014 real_floor (&r, TYPE_MODE (ftype), &x);
8015 break;
8016
8017 CASE_FLT_FN (BUILT_IN_ICEIL):
8018 CASE_FLT_FN (BUILT_IN_LCEIL):
8019 CASE_FLT_FN (BUILT_IN_LLCEIL):
8020 real_ceil (&r, TYPE_MODE (ftype), &x);
8021 break;
8022
8023 CASE_FLT_FN (BUILT_IN_IROUND):
8024 CASE_FLT_FN (BUILT_IN_LROUND):
8025 CASE_FLT_FN (BUILT_IN_LLROUND):
8026 real_round (&r, TYPE_MODE (ftype), &x);
8027 break;
8028
8029 default:
8030 gcc_unreachable ();
8031 }
8032
8033 real_to_integer2 ((HOST_WIDE_INT *)&val.low, &val.high, &r);
8034 if (double_int_fits_to_tree_p (itype, val))
8035 return double_int_to_tree (itype, val);
8036 }
8037 }
8038
8039 switch (DECL_FUNCTION_CODE (fndecl))
8040 {
8041 CASE_FLT_FN (BUILT_IN_LFLOOR):
8042 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8043 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8044 if (tree_expr_nonnegative_p (arg))
8045 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
8046 TREE_TYPE (TREE_TYPE (fndecl)), arg);
8047 break;
8048 default:;
8049 }
8050
8051 return fold_fixed_mathfn (loc, fndecl, arg);
8052 }
8053
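/* Editorial sketch (illustrative, not part of the original source;
   the function name is hypothetical): folds done above.  */
#if 0
long
example_lround (double x)
{
  long a = lround (2.5);            /* compile-time constant 3 */
  long b = __builtin_lceil (-0.25); /* compile-time constant 0 */
  /* lfloor (x) with x known nonnegative folds to a plain
     float-to-integer truncation, (long) x.  */
  return a + b;
}
#endif
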
8054 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8055 and their long and long long variants (e.g. ffsl and ffsll). ARG is
8056 the argument to the call. Return NULL_TREE if no simplification can
8057 be made. */
8058
8059 static tree
8060 fold_builtin_bitop (tree fndecl, tree arg)
8061 {
8062 if (!validate_arg (arg, INTEGER_TYPE))
8063 return NULL_TREE;
8064
8065 /* Optimize for constant argument. */
8066 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8067 {
8068 HOST_WIDE_INT hi, width, result;
8069 unsigned HOST_WIDE_INT lo;
8070 tree type;
8071
8072 type = TREE_TYPE (arg);
8073 width = TYPE_PRECISION (type);
8074 lo = TREE_INT_CST_LOW (arg);
8075
8076 /* Clear all the bits that are beyond the type's precision. */
8077 if (width > HOST_BITS_PER_WIDE_INT)
8078 {
8079 hi = TREE_INT_CST_HIGH (arg);
8080 if (width < HOST_BITS_PER_DOUBLE_INT)
8081 hi &= ~(HOST_WIDE_INT_M1U << (width - HOST_BITS_PER_WIDE_INT));
8082 }
8083 else
8084 {
8085 hi = 0;
8086 if (width < HOST_BITS_PER_WIDE_INT)
8087 lo &= ~(HOST_WIDE_INT_M1U << width);
8088 }
8089
8090 switch (DECL_FUNCTION_CODE (fndecl))
8091 {
8092 CASE_INT_FN (BUILT_IN_FFS):
8093 if (lo != 0)
8094 result = ffs_hwi (lo);
8095 else if (hi != 0)
8096 result = HOST_BITS_PER_WIDE_INT + ffs_hwi (hi);
8097 else
8098 result = 0;
8099 break;
8100
8101 CASE_INT_FN (BUILT_IN_CLZ):
8102 if (hi != 0)
8103 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
8104 else if (lo != 0)
8105 result = width - floor_log2 (lo) - 1;
8106 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8107 result = width;
8108 break;
8109
8110 CASE_INT_FN (BUILT_IN_CTZ):
8111 if (lo != 0)
8112 result = ctz_hwi (lo);
8113 else if (hi != 0)
8114 result = HOST_BITS_PER_WIDE_INT + ctz_hwi (hi);
8115 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8116 result = width;
8117 break;
8118
8119 CASE_INT_FN (BUILT_IN_CLRSB):
8120 if (width > 2 * HOST_BITS_PER_WIDE_INT)
8121 return NULL_TREE;
8122 if (width > HOST_BITS_PER_WIDE_INT
8123 && (hi & ((unsigned HOST_WIDE_INT) 1
8124 << (width - HOST_BITS_PER_WIDE_INT - 1))) != 0)
8125 {
8126 hi = ~hi & ~(HOST_WIDE_INT_M1U
8127 << (width - HOST_BITS_PER_WIDE_INT - 1));
8128 lo = ~lo;
8129 }
8130 else if (width <= HOST_BITS_PER_WIDE_INT
8131 && (lo & ((unsigned HOST_WIDE_INT) 1 << (width - 1))) != 0)
8132 lo = ~lo & ~(HOST_WIDE_INT_M1U << (width - 1));
8133 if (hi != 0)
8134 result = width - floor_log2 (hi) - 2 - HOST_BITS_PER_WIDE_INT;
8135 else if (lo != 0)
8136 result = width - floor_log2 (lo) - 2;
8137 else
8138 result = width - 1;
8139 break;
8140
8141 CASE_INT_FN (BUILT_IN_POPCOUNT):
8142 result = 0;
8143 while (lo)
8144 result++, lo &= lo - 1;
8145 while (hi)
8146 result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
8147 break;
8148
8149 CASE_INT_FN (BUILT_IN_PARITY):
8150 result = 0;
8151 while (lo)
8152 result++, lo &= lo - 1;
8153 while (hi)
8154 result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
8155 result &= 1;
8156 break;
8157
8158 default:
8159 gcc_unreachable ();
8160 }
8161
8162 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8163 }
8164
8165 return NULL_TREE;
8166 }
8167
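/* Editorial sketch (illustrative, not part of the original source;
   the function name is hypothetical): constant folds done above.  */
#if 0
int
example_bitop (void)
{
  int a = __builtin_popcount (0xff); /* 8 */
  int b = __builtin_ctz (8);         /* 3 */
  int c = __builtin_ffs (0);         /* 0 */
  int d = __builtin_parity (7);      /* 1 (three bits set) */
  return a + b + c + d;
}
#endif
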
8168 /* Fold function call to builtin_bswap and the short, long and long long
8169 variants. Return NULL_TREE if no simplification can be made. */
8170 static tree
8171 fold_builtin_bswap (tree fndecl, tree arg)
8172 {
8173 if (! validate_arg (arg, INTEGER_TYPE))
8174 return NULL_TREE;
8175
8176 /* Optimize constant value. */
8177 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8178 {
8179 HOST_WIDE_INT hi, width, r_hi = 0;
8180 unsigned HOST_WIDE_INT lo, r_lo = 0;
8181 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8182
8183 width = TYPE_PRECISION (type);
8184 lo = TREE_INT_CST_LOW (arg);
8185 hi = TREE_INT_CST_HIGH (arg);
8186
8187 switch (DECL_FUNCTION_CODE (fndecl))
8188 {
8189 case BUILT_IN_BSWAP16:
8190 case BUILT_IN_BSWAP32:
8191 case BUILT_IN_BSWAP64:
8192 {
8193 int s;
8194
8195 for (s = 0; s < width; s += 8)
8196 {
8197 int d = width - s - 8;
8198 unsigned HOST_WIDE_INT byte;
8199
8200 if (s < HOST_BITS_PER_WIDE_INT)
8201 byte = (lo >> s) & 0xff;
8202 else
8203 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
8204
8205 if (d < HOST_BITS_PER_WIDE_INT)
8206 r_lo |= byte << d;
8207 else
8208 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
8209 }
8210 }
8211
8212 break;
8213
8214 default:
8215 gcc_unreachable ();
8216 }
8217
8218 if (width < HOST_BITS_PER_WIDE_INT)
8219 return build_int_cst (type, r_lo);
8220 else
8221 return build_int_cst_wide (type, r_lo, r_hi);
8222 }
8223
8224 return NULL_TREE;
8225 }
8226
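/* Editorial sketch (illustrative, not part of the original source;
   the function name is hypothetical): constant folds done above.  */
#if 0
unsigned int
example_bswap (void)
{
  unsigned short a = __builtin_bswap16 (0x1234);   /* 0x3412 */
  unsigned int b = __builtin_bswap32 (0x12345678); /* 0x78563412 */
  return a + b;
}
#endif
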
8227 /* A subroutine of fold_builtin to fold the various logarithmic
8228 functions. Return NULL_TREE if no simplification can be made.
8229 FUNC is the corresponding MPFR logarithm function. */
8230
8231 static tree
8232 fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
8233 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8234 {
8235 if (validate_arg (arg, REAL_TYPE))
8236 {
8237 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8238 tree res;
8239 const enum built_in_function fcode = builtin_mathfn_code (arg);
8240
8241 /* Calculate the result when the argument is a constant. */
8242 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
8243 return res;
8244
8245 /* Special case, optimize logN(expN(x)) = x. */
8246 if (flag_unsafe_math_optimizations
8247 && ((func == mpfr_log
8248 && (fcode == BUILT_IN_EXP
8249 || fcode == BUILT_IN_EXPF
8250 || fcode == BUILT_IN_EXPL))
8251 || (func == mpfr_log2
8252 && (fcode == BUILT_IN_EXP2
8253 || fcode == BUILT_IN_EXP2F
8254 || fcode == BUILT_IN_EXP2L))
8255 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
8256 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8257
8258 /* Optimize logN(func()) for various exponential functions. We
8259 want to determine the value "x" and the power "exponent" in
8260 order to transform logN(x**exponent) into exponent*logN(x). */
8261 if (flag_unsafe_math_optimizations)
8262 {
8263 tree exponent = 0, x = 0;
8264
8265 switch (fcode)
8266 {
8267 CASE_FLT_FN (BUILT_IN_EXP):
8268 /* Prepare to do logN(exp(exponent)) -> exponent*logN(e). */
8269 x = build_real (type, real_value_truncate (TYPE_MODE (type),
8270 dconst_e ()));
8271 exponent = CALL_EXPR_ARG (arg, 0);
8272 break;
8273 CASE_FLT_FN (BUILT_IN_EXP2):
8274 /* Prepare to do logN(exp2(exponent)) -> exponent*logN(2). */
8275 x = build_real (type, dconst2);
8276 exponent = CALL_EXPR_ARG (arg, 0);
8277 break;
8278 CASE_FLT_FN (BUILT_IN_EXP10):
8279 CASE_FLT_FN (BUILT_IN_POW10):
8280 /* Prepare to do logN(exp10(exponent)) -> exponent*logN(10). */
8281 {
8282 REAL_VALUE_TYPE dconst10;
8283 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
8284 x = build_real (type, dconst10);
8285 }
8286 exponent = CALL_EXPR_ARG (arg, 0);
8287 break;
8288 CASE_FLT_FN (BUILT_IN_SQRT):
8289 /* Prepare to do logN(sqrt(x)) -> 0.5*logN(x). */
8290 x = CALL_EXPR_ARG (arg, 0);
8291 exponent = build_real (type, dconsthalf);
8292 break;
8293 CASE_FLT_FN (BUILT_IN_CBRT):
8294 /* Prepare to do logN(cbrt(x)) -> (1/3)*logN(x). */
8295 x = CALL_EXPR_ARG (arg, 0);
8296 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
8297 dconst_third ()));
8298 break;
8299 CASE_FLT_FN (BUILT_IN_POW):
8300 /* Prepare to do logN(pow(x,exponent)) -> exponent*logN(x). */
8301 x = CALL_EXPR_ARG (arg, 0);
8302 exponent = CALL_EXPR_ARG (arg, 1);
8303 break;
8304 default:
8305 break;
8306 }
8307
8308 /* Now perform the optimization. */
8309 if (x && exponent)
8310 {
8311 tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
8312 return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
8313 }
8314 }
8315 }
8316
8317 return NULL_TREE;
8318 }
8319
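/* Editorial sketch (illustrative, not part of the original source;
   the function name is hypothetical): rewrites done above under
   -funsafe-math-optimizations.  */
#if 0
double
example_log (double x, double y)
{
  double a = log (exp (x));    /* -> x */
  double b = log (sqrt (x));   /* -> 0.5 * log (x) */
  double c = log (pow (x, y)); /* -> y * log (x) */
  return a + b + c;
}
#endif
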
8320 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8321 NULL_TREE if no simplification can be made. */
8322
8323 static tree
8324 fold_builtin_hypot (location_t loc, tree fndecl,
8325 tree arg0, tree arg1, tree type)
8326 {
8327 tree res, narg0, narg1;
8328
8329 if (!validate_arg (arg0, REAL_TYPE)
8330 || !validate_arg (arg1, REAL_TYPE))
8331 return NULL_TREE;
8332
8333 /* Calculate the result when the argument is a constant. */
8334 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8335 return res;
8336
8337 /* If either argument to hypot has a negate or abs, strip that off.
8338 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8339 narg0 = fold_strip_sign_ops (arg0);
8340 narg1 = fold_strip_sign_ops (arg1);
8341 if (narg0 || narg1)
8342 {
8343 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
8344 narg1 ? narg1 : arg1);
8345 }
8346
8347 /* If either argument is zero, hypot is fabs of the other. */
8348 if (real_zerop (arg0))
8349 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
8350 else if (real_zerop (arg1))
8351 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
8352
8353 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8354 if (flag_unsafe_math_optimizations
8355 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8356 {
8357 const REAL_VALUE_TYPE sqrt2_trunc
8358 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8359 return fold_build2_loc (loc, MULT_EXPR, type,
8360 fold_build1_loc (loc, ABS_EXPR, type, arg0),
8361 build_real (type, sqrt2_trunc));
8362 }
8363
8364 return NULL_TREE;
8365 }
8366
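/* Editorial sketch (illustrative, not part of the original source;
   the function name is hypothetical): the hypot folds done above.  */
#if 0
double
example_hypot (double x, double y)
{
  double a = hypot (-x, fabs (y)); /* sign ops stripped: hypot (x, y) */
  double b = hypot (x, 0.0);       /* -> fabs (x) */
  /* With -funsafe-math-optimizations, hypot (x, x) becomes
     fabs (x) * sqrt (2).  */
  return a + b;
}
#endif
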
8367
8368 /* Fold a builtin function call to pow, powf, or powl. Return
8369 NULL_TREE if no simplification can be made. */
8370 static tree
8371 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
8372 {
8373 tree res;
8374
8375 if (!validate_arg (arg0, REAL_TYPE)
8376 || !validate_arg (arg1, REAL_TYPE))
8377 return NULL_TREE;
8378
8379 /* Calculate the result when the argument is a constant. */
8380 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8381 return res;
8382
8383 /* Optimize pow(1.0,y) = 1.0. */
8384 if (real_onep (arg0))
8385 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8386
8387 if (TREE_CODE (arg1) == REAL_CST
8388 && !TREE_OVERFLOW (arg1))
8389 {
8390 REAL_VALUE_TYPE cint;
8391 REAL_VALUE_TYPE c;
8392 HOST_WIDE_INT n;
8393
8394 c = TREE_REAL_CST (arg1);
8395
8396 /* Optimize pow(x,0.0) = 1.0. */
8397 if (REAL_VALUES_EQUAL (c, dconst0))
8398 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8399 arg0);
8400
8401 /* Optimize pow(x,1.0) = x. */
8402 if (REAL_VALUES_EQUAL (c, dconst1))
8403 return arg0;
8404
8405 /* Optimize pow(x,-1.0) = 1.0/x. */
8406 if (REAL_VALUES_EQUAL (c, dconstm1))
8407 return fold_build2_loc (loc, RDIV_EXPR, type,
8408 build_real (type, dconst1), arg0);
8409
8410 /* Optimize pow(x,0.5) = sqrt(x). */
8411 if (flag_unsafe_math_optimizations
8412 && REAL_VALUES_EQUAL (c, dconsthalf))
8413 {
8414 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8415
8416 if (sqrtfn != NULL_TREE)
8417 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
8418 }
8419
8420 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8421 if (flag_unsafe_math_optimizations)
8422 {
8423 const REAL_VALUE_TYPE dconstroot
8424 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8425
8426 if (REAL_VALUES_EQUAL (c, dconstroot))
8427 {
8428 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8429 if (cbrtfn != NULL_TREE)
8430 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
8431 }
8432 }
8433
8434 /* Check for an integer exponent. */
8435 n = real_to_integer (&c);
8436 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
8437 if (real_identical (&c, &cint))
8438 {
8439 /* Attempt to evaluate pow at compile-time, unless this should
8440 raise an exception. */
8441 if (TREE_CODE (arg0) == REAL_CST
8442 && !TREE_OVERFLOW (arg0)
8443 && (n > 0
8444 || (!flag_trapping_math && !flag_errno_math)
8445 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
8446 {
8447 REAL_VALUE_TYPE x;
8448 bool inexact;
8449
8450 x = TREE_REAL_CST (arg0);
8451 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8452 if (flag_unsafe_math_optimizations || !inexact)
8453 return build_real (type, x);
8454 }
8455
8456 /* Strip sign ops from even integer powers. */
8457 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8458 {
8459 tree narg0 = fold_strip_sign_ops (arg0);
8460 if (narg0)
8461 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
8462 }
8463 }
8464 }
8465
8466 if (flag_unsafe_math_optimizations)
8467 {
8468 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8469
8470 /* Optimize pow(expN(x),y) = expN(x*y). */
8471 if (BUILTIN_EXPONENT_P (fcode))
8472 {
8473 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8474 tree arg = CALL_EXPR_ARG (arg0, 0);
8475 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
8476 return build_call_expr_loc (loc, expfn, 1, arg);
8477 }
8478
8479 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8480 if (BUILTIN_SQRT_P (fcode))
8481 {
8482 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8483 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8484 build_real (type, dconsthalf));
8485 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
8486 }
8487
8488 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8489 if (BUILTIN_CBRT_P (fcode))
8490 {
8491 tree arg = CALL_EXPR_ARG (arg0, 0);
8492 if (tree_expr_nonnegative_p (arg))
8493 {
8494 const REAL_VALUE_TYPE dconstroot
8495 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8496 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8497 build_real (type, dconstroot));
8498 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
8499 }
8500 }
8501
8502 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8503 if (fcode == BUILT_IN_POW
8504 || fcode == BUILT_IN_POWF
8505 || fcode == BUILT_IN_POWL)
8506 {
8507 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8508 if (tree_expr_nonnegative_p (arg00))
8509 {
8510 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8511 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
8512 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
8513 }
8514 }
8515 }
8516
8517 return NULL_TREE;
8518 }
8519
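/* Editorial sketch (illustrative, not part of the original source;
   the function name is hypothetical): a few of the pow folds done
   above.  */
#if 0
double
example_pow (double x)
{
  double a = pow (x, 1.0);   /* -> x */
  double b = pow (x, -1.0);  /* -> 1.0 / x */
  double c = pow (2.0, 3.0); /* evaluated at compile time: 8.0 */
  /* Under -funsafe-math-optimizations: pow (x, 0.5) -> sqrt (x)
     and pow (sqrt (x), y) -> pow (x, y * 0.5).  */
  return a + b + c;
}
#endif
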
8520 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8521 Return NULL_TREE if no simplification can be made. */
8522 static tree
8523 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8524 tree arg0, tree arg1, tree type)
8525 {
8526 if (!validate_arg (arg0, REAL_TYPE)
8527 || !validate_arg (arg1, INTEGER_TYPE))
8528 return NULL_TREE;
8529
8530 /* Optimize pow(1.0,y) = 1.0. */
8531 if (real_onep (arg0))
8532 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8533
8534 if (tree_fits_shwi_p (arg1))
8535 {
8536 HOST_WIDE_INT c = tree_to_shwi (arg1);
8537
8538 /* Evaluate powi at compile-time. */
8539 if (TREE_CODE (arg0) == REAL_CST
8540 && !TREE_OVERFLOW (arg0))
8541 {
8542 REAL_VALUE_TYPE x;
8543 x = TREE_REAL_CST (arg0);
8544 real_powi (&x, TYPE_MODE (type), &x, c);
8545 return build_real (type, x);
8546 }
8547
8548 /* Optimize pow(x,0) = 1.0. */
8549 if (c == 0)
8550 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8551 arg0);
8552
8553 /* Optimize pow(x,1) = x. */
8554 if (c == 1)
8555 return arg0;
8556
8557 /* Optimize pow(x,-1) = 1.0/x. */
8558 if (c == -1)
8559 return fold_build2_loc (loc, RDIV_EXPR, type,
8560 build_real (type, dconst1), arg0);
8561 }
8562
8563 return NULL_TREE;
8564 }
8565
8566 /* A subroutine of fold_builtin to fold the various exponent
8567 functions. Return NULL_TREE if no simplification can be made.
8568 FUNC is the corresponding MPFR exponent function. */
8569
8570 static tree
8571 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8572 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8573 {
8574 if (validate_arg (arg, REAL_TYPE))
8575 {
8576 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8577 tree res;
8578
8579 /* Calculate the result when the argument is a constant. */
8580 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8581 return res;
8582
8583 /* Optimize expN(logN(x)) = x. */
8584 if (flag_unsafe_math_optimizations)
8585 {
8586 const enum built_in_function fcode = builtin_mathfn_code (arg);
8587
8588 if ((func == mpfr_exp
8589 && (fcode == BUILT_IN_LOG
8590 || fcode == BUILT_IN_LOGF
8591 || fcode == BUILT_IN_LOGL))
8592 || (func == mpfr_exp2
8593 && (fcode == BUILT_IN_LOG2
8594 || fcode == BUILT_IN_LOG2F
8595 || fcode == BUILT_IN_LOG2L))
8596 || (func == mpfr_exp10
8597 && (fcode == BUILT_IN_LOG10
8598 || fcode == BUILT_IN_LOG10F
8599 || fcode == BUILT_IN_LOG10L)))
8600 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8601 }
8602 }
8603
8604 return NULL_TREE;
8605 }
8606
8607 /* Return true if VAR is a VAR_DECL or a component thereof. */
8608
8609 static bool
8610 var_decl_component_p (tree var)
8611 {
8612 tree inner = var;
8613 while (handled_component_p (inner))
8614 inner = TREE_OPERAND (inner, 0);
8615 return SSA_VAR_P (inner);
8616 }
8617
8618 /* Fold function call to builtin memset. Return
8619 NULL_TREE if no simplification can be made. */
8620
8621 static tree
8622 fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
8623 tree type, bool ignore)
8624 {
8625 tree var, ret, etype;
8626 unsigned HOST_WIDE_INT length, cval;
8627
8628 if (! validate_arg (dest, POINTER_TYPE)
8629 || ! validate_arg (c, INTEGER_TYPE)
8630 || ! validate_arg (len, INTEGER_TYPE))
8631 return NULL_TREE;
8632
8633 if (! tree_fits_uhwi_p (len))
8634 return NULL_TREE;
8635
8636 /* If the LEN parameter is zero, return DEST. */
8637 if (integer_zerop (len))
8638 return omit_one_operand_loc (loc, type, dest, c);
8639
8640 if (TREE_CODE (c) != INTEGER_CST || TREE_SIDE_EFFECTS (dest))
8641 return NULL_TREE;
8642
8643 var = dest;
8644 STRIP_NOPS (var);
8645 if (TREE_CODE (var) != ADDR_EXPR)
8646 return NULL_TREE;
8647
8648 var = TREE_OPERAND (var, 0);
8649 if (TREE_THIS_VOLATILE (var))
8650 return NULL_TREE;
8651
8652 etype = TREE_TYPE (var);
8653 if (TREE_CODE (etype) == ARRAY_TYPE)
8654 etype = TREE_TYPE (etype);
8655
8656 if (!INTEGRAL_TYPE_P (etype)
8657 && !POINTER_TYPE_P (etype))
8658 return NULL_TREE;
8659
8660 if (! var_decl_component_p (var))
8661 return NULL_TREE;
8662
8663 length = tree_to_uhwi (len);
8664 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
8665 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
8666 return NULL_TREE;
8667
8668 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8669 return NULL_TREE;
8670
8671 if (integer_zerop (c))
8672 cval = 0;
8673 else
8674 {
8675 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8676 return NULL_TREE;
8677
8678 cval = TREE_INT_CST_LOW (c);
8679 cval &= 0xff;
8680 cval |= cval << 8;
8681 cval |= cval << 16;
8682 cval |= (cval << 31) << 1;
8683 }
8684
8685 ret = build_int_cst_type (etype, cval);
8686 var = build_fold_indirect_ref_loc (loc,
8687 fold_convert_loc (loc,
8688 build_pointer_type (etype),
8689 dest));
8690 ret = build2 (MODIFY_EXPR, etype, var, ret);
8691 if (ignore)
8692 return ret;
8693
8694 return omit_one_operand_loc (loc, type, dest, ret);
8695 }
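
/* A worked example of the single-store fold above, assuming a
   32-bit int target with 8-bit bytes and a 64-bit HOST_WIDE_INT:

     int i;
     memset (&i, 0xab, sizeof i);   ->   i = 0xabababab;

   The shifts replicate the low byte across the constant:

     cval = 0xab;
     cval |= cval << 8;            0xabab
     cval |= cval << 16;           0xabababab
     cval |= (cval << 31) << 1;    0xabababababababab

   The split 31+1 shift avoids an undefined shift by 32 when
   HOST_WIDE_INT is only 32 bits wide.  */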
8696
8697 /* Fold function call to builtin bzero. Return
8698 NULL_TREE if no simplification can be made. */
8699
8700 static tree
8701 fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
8702 {
8703 if (! validate_arg (dest, POINTER_TYPE)
8704 || ! validate_arg (size, INTEGER_TYPE))
8705 return NULL_TREE;
8706
8707 if (!ignore)
8708 return NULL_TREE;
8709
8710 /* New argument list transforming bzero(ptr x, int y) to
8711 memset(ptr x, int 0, size_t y). This is done this way
8712 so that if it isn't expanded inline, we fall back to
8713 calling bzero instead of memset. */
8714
8715 return fold_builtin_memset (loc, dest, integer_zero_node,
8716 fold_convert_loc (loc, size_type_node, size),
8717 void_type_node, ignore);
8718 }
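
/* For example, when the result is unused:

     bzero (&i, sizeof i);   ->   i = 0;

   via the memset fold above with C == 0.  If that fold gives up,
   NULL_TREE is returned and the original bzero call survives, so
   we never trade a bzero library call for a memset one.  */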
8719
8720 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8721 NULL_TREE if no simplification can be made.
8722 If ENDP is 0, return DEST (like memcpy).
8723 If ENDP is 1, return DEST+LEN (like mempcpy).
8724 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8725 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8726 (memmove). */
8727
8728 static tree
8729 fold_builtin_memory_op (location_t loc, tree dest, tree src,
8730 tree len, tree type, bool ignore, int endp)
8731 {
8732 tree destvar, srcvar, expr;
8733
8734 if (! validate_arg (dest, POINTER_TYPE)
8735 || ! validate_arg (src, POINTER_TYPE)
8736 || ! validate_arg (len, INTEGER_TYPE))
8737 return NULL_TREE;
8738
8739 /* If the LEN parameter is zero, return DEST. */
8740 if (integer_zerop (len))
8741 return omit_one_operand_loc (loc, type, dest, src);
8742
8743 /* If SRC and DEST are the same (and not volatile), return
8744 DEST{,+LEN,+LEN-1}. */
8745 if (operand_equal_p (src, dest, 0))
8746 expr = len;
8747 else
8748 {
8749 tree srctype, desttype;
8750 unsigned int src_align, dest_align;
8751 tree off0;
8752
8753 if (endp == 3)
8754 {
8755 src_align = get_pointer_alignment (src);
8756 dest_align = get_pointer_alignment (dest);
8757
8758 /* Both DEST and SRC must be pointer types.
8759 ??? This is what old code did. Is the testing for pointer types
8760 really mandatory?
8761
8762 If SRC is readonly, or LEN is within the alignment of both operands (so the regions are either identical or disjoint), we can use memcpy. */
8763 if (!dest_align || !src_align)
8764 return NULL_TREE;
8765 if (readonly_data_expr (src)
8766 || (tree_fits_uhwi_p (len)
8767 && (MIN (src_align, dest_align) / BITS_PER_UNIT
8768 >= tree_to_uhwi (len))))
8769 {
8770 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8771 if (!fn)
8772 return NULL_TREE;
8773 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8774 }
8775
8776 /* If *src and *dest can't overlap, optimize into memcpy as well. */
8777 if (TREE_CODE (src) == ADDR_EXPR
8778 && TREE_CODE (dest) == ADDR_EXPR)
8779 {
8780 tree src_base, dest_base, fn;
8781 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
8782 HOST_WIDE_INT size = -1;
8783 HOST_WIDE_INT maxsize = -1;
8784
8785 srcvar = TREE_OPERAND (src, 0);
8786 src_base = get_ref_base_and_extent (srcvar, &src_offset,
8787 &size, &maxsize);
8788 destvar = TREE_OPERAND (dest, 0);
8789 dest_base = get_ref_base_and_extent (destvar, &dest_offset,
8790 &size, &maxsize);
8791 if (tree_fits_uhwi_p (len))
8792 maxsize = tree_to_uhwi (len);
8793 else
8794 maxsize = -1;
8795 src_offset /= BITS_PER_UNIT;
8796 dest_offset /= BITS_PER_UNIT;
8797 if (SSA_VAR_P (src_base)
8798 && SSA_VAR_P (dest_base))
8799 {
8800 if (operand_equal_p (src_base, dest_base, 0)
8801 && ranges_overlap_p (src_offset, maxsize,
8802 dest_offset, maxsize))
8803 return NULL_TREE;
8804 }
8805 else if (TREE_CODE (src_base) == MEM_REF
8806 && TREE_CODE (dest_base) == MEM_REF)
8807 {
8808 double_int off;
8809 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
8810 TREE_OPERAND (dest_base, 0), 0))
8811 return NULL_TREE;
8812 off = mem_ref_offset (src_base) +
8813 double_int::from_shwi (src_offset);
8814 if (!off.fits_shwi ())
8815 return NULL_TREE;
8816 src_offset = off.low;
8817 off = mem_ref_offset (dest_base) +
8818 double_int::from_shwi (dest_offset);
8819 if (!off.fits_shwi ())
8820 return NULL_TREE;
8821 dest_offset = off.low;
8822 if (ranges_overlap_p (src_offset, maxsize,
8823 dest_offset, maxsize))
8824 return NULL_TREE;
8825 }
8826 else
8827 return NULL_TREE;
8828
8829 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8830 if (!fn)
8831 return NULL_TREE;
8832 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8833 }
8834
8835 /* If the destination and source do not alias, optimize into
8836 memcpy as well. */
8837 if ((is_gimple_min_invariant (dest)
8838 || TREE_CODE (dest) == SSA_NAME)
8839 && (is_gimple_min_invariant (src)
8840 || TREE_CODE (src) == SSA_NAME))
8841 {
8842 ao_ref destr, srcr;
8843 ao_ref_init_from_ptr_and_size (&destr, dest, len);
8844 ao_ref_init_from_ptr_and_size (&srcr, src, len);
8845 if (!refs_may_alias_p_1 (&destr, &srcr, false))
8846 {
8847 tree fn;
8848 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8849 if (!fn)
8850 return NULL_TREE;
8851 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8852 }
8853 }
8854
8855 return NULL_TREE;
8856 }
8857
8858 if (!tree_fits_shwi_p (len))
8859 return NULL_TREE;
8860 /* FIXME:
8861 This logic loses for arguments like (type *)malloc (sizeof (type)),
8862 since we strip the casts of up to VOID return value from malloc.
8863 Perhaps we ought to inherit type from non-VOID argument here? */
8864 STRIP_NOPS (src);
8865 STRIP_NOPS (dest);
8866 if (!POINTER_TYPE_P (TREE_TYPE (src))
8867 || !POINTER_TYPE_P (TREE_TYPE (dest)))
8868 return NULL_TREE;
8869 /* As we fold (void *)(p + CST) to (void *)p + CST, undo this here. */
8870 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
8871 {
8872 tree tem = TREE_OPERAND (src, 0);
8873 STRIP_NOPS (tem);
8874 if (tem != TREE_OPERAND (src, 0))
8875 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
8876 }
8877 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
8878 {
8879 tree tem = TREE_OPERAND (dest, 0);
8880 STRIP_NOPS (tem);
8881 if (tem != TREE_OPERAND (dest, 0))
8882 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
8883 }
8884 srctype = TREE_TYPE (TREE_TYPE (src));
8885 if (TREE_CODE (srctype) == ARRAY_TYPE
8886 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8887 {
8888 srctype = TREE_TYPE (srctype);
8889 STRIP_NOPS (src);
8890 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
8891 }
8892 desttype = TREE_TYPE (TREE_TYPE (dest));
8893 if (TREE_CODE (desttype) == ARRAY_TYPE
8894 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8895 {
8896 desttype = TREE_TYPE (desttype);
8897 STRIP_NOPS (dest);
8898 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
8899 }
8900 if (TREE_ADDRESSABLE (srctype)
8901 || TREE_ADDRESSABLE (desttype))
8902 return NULL_TREE;
8903
8904 src_align = get_pointer_alignment (src);
8905 dest_align = get_pointer_alignment (dest);
8906 if (dest_align < TYPE_ALIGN (desttype)
8907 || src_align < TYPE_ALIGN (srctype))
8908 return NULL_TREE;
8909
8910 if (!ignore)
8911 dest = builtin_save_expr (dest);
8912
8913 /* Build accesses at offset zero with a ref-all character type. */
8914 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
8915 ptr_mode, true), 0);
8916
8917 /* For -fsanitize={bool,enum} make sure the load isn't performed in
8918 the bool or enum type. */
8919 if (((flag_sanitize & SANITIZE_BOOL)
8920 && TREE_CODE (desttype) == BOOLEAN_TYPE)
8921 || ((flag_sanitize & SANITIZE_ENUM)
8922 && TREE_CODE (desttype) == ENUMERAL_TYPE))
8923 {
8924 tree destitype
8925 = lang_hooks.types.type_for_mode (TYPE_MODE (desttype),
8926 TYPE_UNSIGNED (desttype));
8927 desttype = build_aligned_type (destitype, TYPE_ALIGN (desttype));
8928 }
8929 if (((flag_sanitize & SANITIZE_BOOL)
8930 && TREE_CODE (srctype) == BOOLEAN_TYPE)
8931 || ((flag_sanitize & SANITIZE_ENUM)
8932 && TREE_CODE (srctype) == ENUMERAL_TYPE))
8933 {
8934 tree srcitype
8935 = lang_hooks.types.type_for_mode (TYPE_MODE (srctype),
8936 TYPE_UNSIGNED (srctype));
8937 srctype = build_aligned_type (srcitype, TYPE_ALIGN (srctype));
8938 }
8939
8940 destvar = dest;
8941 STRIP_NOPS (destvar);
8942 if (TREE_CODE (destvar) == ADDR_EXPR
8943 && var_decl_component_p (TREE_OPERAND (destvar, 0))
8944 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8945 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
8946 else
8947 destvar = NULL_TREE;
8948
8949 srcvar = src;
8950 STRIP_NOPS (srcvar);
8951 if (TREE_CODE (srcvar) == ADDR_EXPR
8952 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
8953 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8954 {
8955 if (!destvar
8956 || src_align >= TYPE_ALIGN (desttype))
8957 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
8958 srcvar, off0);
8959 else if (!STRICT_ALIGNMENT)
8960 {
8961 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
8962 src_align);
8963 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
8964 }
8965 else
8966 srcvar = NULL_TREE;
8967 }
8968 else
8969 srcvar = NULL_TREE;
8970
8971 if (srcvar == NULL_TREE && destvar == NULL_TREE)
8972 return NULL_TREE;
8973
8974 if (srcvar == NULL_TREE)
8975 {
8976 STRIP_NOPS (src);
8977 if (src_align >= TYPE_ALIGN (desttype))
8978 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
8979 else
8980 {
8981 if (STRICT_ALIGNMENT)
8982 return NULL_TREE;
8983 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
8984 src_align);
8985 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
8986 }
8987 }
8988 else if (destvar == NULL_TREE)
8989 {
8990 STRIP_NOPS (dest);
8991 if (dest_align >= TYPE_ALIGN (srctype))
8992 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
8993 else
8994 {
8995 if (STRICT_ALIGNMENT)
8996 return NULL_TREE;
8997 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
8998 dest_align);
8999 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
9000 }
9001 }
9002
9003 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, srcvar);
9004 }
9005
9006 if (ignore)
9007 return expr;
9008
9009 if (endp == 0 || endp == 3)
9010 return omit_one_operand_loc (loc, type, dest, expr);
9011
9012 if (expr == len)
9013 expr = NULL_TREE;
9014
9015 if (endp == 2)
9016 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
9017 ssize_int (1));
9018
9019 dest = fold_build_pointer_plus_loc (loc, dest, len);
9020 dest = fold_convert_loc (loc, type, dest);
9021 if (expr)
9022 dest = omit_one_operand_loc (loc, type, dest, expr);
9023 return dest;
9024 }
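
/* A sketch of the single-store case above, assuming two distinct
   scalar variables of the same 4-byte type:

     int d, s;
     memcpy (&d, &s, sizeof d);    ->   d = s;
     mempcpy (&d, &s, sizeof d);   ->   d = s, (char *) &d + 4;

   For memmove (ENDP == 3) no store is built; instead the routine
   tries to prove *SRC and *DEST cannot overlap and, if it can,
   rewrites the call as memcpy.  */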
9025
9026 /* Fold function call to builtin strcpy with arguments DEST and SRC.
9027 If LEN is not NULL, it represents the length of the string to be
9028 copied. Return NULL_TREE if no simplification can be made. */
9029
9030 tree
9031 fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
9032 {
9033 tree fn;
9034
9035 if (!validate_arg (dest, POINTER_TYPE)
9036 || !validate_arg (src, POINTER_TYPE))
9037 return NULL_TREE;
9038
9039 /* If SRC and DEST are the same (and not volatile), return DEST. */
9040 if (operand_equal_p (src, dest, 0))
9041 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
9042
9043 if (optimize_function_for_size_p (cfun))
9044 return NULL_TREE;
9045
9046 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
9047 if (!fn)
9048 return NULL_TREE;
9049
9050 if (!len)
9051 {
9052 len = c_strlen (src, 1);
9053 if (! len || TREE_SIDE_EFFECTS (len))
9054 return NULL_TREE;
9055 }
9056
9057 len = fold_convert_loc (loc, size_type_node, len);
9058 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
9059 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
9060 build_call_expr_loc (loc, fn, 3, dest, src, len));
9061 }
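
/* For example, with a constant source and not optimizing for size:

     strcpy (d, "abc");   ->   memcpy (d, "abc", 4);

   where 4 is strlen ("abc") + 1 so the terminating NUL is copied.  */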
9062
9063 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
9064 Return NULL_TREE if no simplification can be made. */
9065
9066 static tree
9067 fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
9068 {
9069 tree fn, len, lenp1, call, type;
9070
9071 if (!validate_arg (dest, POINTER_TYPE)
9072 || !validate_arg (src, POINTER_TYPE))
9073 return NULL_TREE;
9074
9075 len = c_strlen (src, 1);
9076 if (!len
9077 || TREE_CODE (len) != INTEGER_CST)
9078 return NULL_TREE;
9079
9080 if (optimize_function_for_size_p (cfun)
9081 /* If length is zero it's small enough. */
9082 && !integer_zerop (len))
9083 return NULL_TREE;
9084
9085 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
9086 if (!fn)
9087 return NULL_TREE;
9088
9089 lenp1 = size_binop_loc (loc, PLUS_EXPR,
9090 fold_convert_loc (loc, size_type_node, len),
9091 build_int_cst (size_type_node, 1));
9092 /* We use dest twice in building our expression. Save it from
9093 multiple expansions. */
9094 dest = builtin_save_expr (dest);
9095 call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);
9096
9097 type = TREE_TYPE (TREE_TYPE (fndecl));
9098 dest = fold_build_pointer_plus_loc (loc, dest, len);
9099 dest = fold_convert_loc (loc, type, dest);
9100 dest = omit_one_operand_loc (loc, type, dest, call);
9101 return dest;
9102 }
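
/* For example, since stpcpy returns a pointer to the written NUL:

     stpcpy (d, "abc");   ->   memcpy (d, "abc", 4), d + 3;

   DEST is wrapped in a SAVE_EXPR because it appears both as the
   memcpy destination and in the returned pointer arithmetic.  */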
9103
9104 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
9105 If SLEN is not NULL, it represents the length of the source string.
9106 Return NULL_TREE if no simplification can be made. */
9107
9108 tree
9109 fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
9110 tree src, tree len, tree slen)
9111 {
9112 tree fn;
9113
9114 if (!validate_arg (dest, POINTER_TYPE)
9115 || !validate_arg (src, POINTER_TYPE)
9116 || !validate_arg (len, INTEGER_TYPE))
9117 return NULL_TREE;
9118
9119 /* If the LEN parameter is zero, return DEST. */
9120 if (integer_zerop (len))
9121 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
9122
9123 /* We can't compare slen with len as constants below if len is not a
9124 constant. */
9125 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
9126 return NULL_TREE;
9127
9128 if (!slen)
9129 slen = c_strlen (src, 1);
9130
9131 /* Now, we must be passed a constant src ptr parameter. */
9132 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
9133 return NULL_TREE;
9134
9135 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
9136
9137 /* We do not support simplification of this case, though we do
9138 support it when expanding trees into RTL. */
9139 /* FIXME: generate a call to __builtin_memset. */
9140 if (tree_int_cst_lt (slen, len))
9141 return NULL_TREE;
9142
9143 /* OK transform into builtin memcpy. */
9144 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
9145 if (!fn)
9146 return NULL_TREE;
9147
9148 len = fold_convert_loc (loc, size_type_node, len);
9149 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
9150 build_call_expr_loc (loc, fn, 3, dest, src, len));
9151 }
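
/* For example, when the bound covers the whole source string:

     strncpy (d, "abc", 4);   ->   memcpy (d, "abc", 4);

   When LEN exceeds SLEN + 1 the fold is refused: strncpy would
   have to zero-pad the remaining bytes, memcpy would not.  */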
9152
9153 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
9154 arguments to the call, and TYPE is its return type.
9155 Return NULL_TREE if no simplification can be made. */
9156
9157 static tree
9158 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
9159 {
9160 if (!validate_arg (arg1, POINTER_TYPE)
9161 || !validate_arg (arg2, INTEGER_TYPE)
9162 || !validate_arg (len, INTEGER_TYPE))
9163 return NULL_TREE;
9164 else
9165 {
9166 const char *p1;
9167
9168 if (TREE_CODE (arg2) != INTEGER_CST
9169 || !tree_fits_uhwi_p (len))
9170 return NULL_TREE;
9171
9172 p1 = c_getstr (arg1);
9173 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
9174 {
9175 char c;
9176 const char *r;
9177 tree tem;
9178
9179 if (target_char_cast (arg2, &c))
9180 return NULL_TREE;
9181
9182 r = (const char *) memchr (p1, c, tree_to_uhwi (len));
9183
9184 if (r == NULL)
9185 return build_int_cst (TREE_TYPE (arg1), 0);
9186
9187 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
9188 return fold_convert_loc (loc, type, tem);
9189 }
9190 return NULL_TREE;
9191 }
9192 }
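
/* For example, with a constant string and an in-range length:

     memchr ("hello", 'l', 6);   ->   "hello" + 2
     memchr ("hello", 'z', 6);   ->   null pointer constant

   LEN may not exceed strlen (p1) + 1 because the host memchr call
   below can only safely scan the NUL-terminated constant.  */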
9193
9194 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
9195 Return NULL_TREE if no simplification can be made. */
9196
9197 static tree
9198 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
9199 {
9200 const char *p1, *p2;
9201
9202 if (!validate_arg (arg1, POINTER_TYPE)
9203 || !validate_arg (arg2, POINTER_TYPE)
9204 || !validate_arg (len, INTEGER_TYPE))
9205 return NULL_TREE;
9206
9207 /* If the LEN parameter is zero, return zero. */
9208 if (integer_zerop (len))
9209 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
9210 arg1, arg2);
9211
9212 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9213 if (operand_equal_p (arg1, arg2, 0))
9214 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
9215
9216 p1 = c_getstr (arg1);
9217 p2 = c_getstr (arg2);
9218
9219 /* If all arguments are constant, and the value of len is not greater
9220 than the lengths of arg1 and arg2, evaluate at compile-time. */
9221 if (tree_fits_uhwi_p (len) && p1 && p2
9222 && compare_tree_int (len, strlen (p1) + 1) <= 0
9223 && compare_tree_int (len, strlen (p2) + 1) <= 0)
9224 {
9225 const int r = memcmp (p1, p2, tree_to_uhwi (len));
9226
9227 if (r > 0)
9228 return integer_one_node;
9229 else if (r < 0)
9230 return integer_minus_one_node;
9231 else
9232 return integer_zero_node;
9233 }
9234
9235 /* If the LEN parameter is one, return an expression corresponding to
9236 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
9237 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
9238 {
9239 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9240 tree cst_uchar_ptr_node
9241 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9242
9243 tree ind1
9244 = fold_convert_loc (loc, integer_type_node,
9245 build1 (INDIRECT_REF, cst_uchar_node,
9246 fold_convert_loc (loc,
9247 cst_uchar_ptr_node,
9248 arg1)));
9249 tree ind2
9250 = fold_convert_loc (loc, integer_type_node,
9251 build1 (INDIRECT_REF, cst_uchar_node,
9252 fold_convert_loc (loc,
9253 cst_uchar_ptr_node,
9254 arg2)));
9255 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9256 }
9257
9258 return NULL_TREE;
9259 }
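
/* The single-byte case above turns, e.g.:

     memcmp (a, b, 1)
       ->   (int) *(const unsigned char *) a
          - (int) *(const unsigned char *) b

   matching memcmp's contract of comparing bytes as unsigned char.  */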
9260
9261 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
9262 Return NULL_TREE if no simplification can be made. */
9263
9264 static tree
9265 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
9266 {
9267 const char *p1, *p2;
9268
9269 if (!validate_arg (arg1, POINTER_TYPE)
9270 || !validate_arg (arg2, POINTER_TYPE))
9271 return NULL_TREE;
9272
9273 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9274 if (operand_equal_p (arg1, arg2, 0))
9275 return integer_zero_node;
9276
9277 p1 = c_getstr (arg1);
9278 p2 = c_getstr (arg2);
9279
9280 if (p1 && p2)
9281 {
9282 const int i = strcmp (p1, p2);
9283 if (i < 0)
9284 return integer_minus_one_node;
9285 else if (i > 0)
9286 return integer_one_node;
9287 else
9288 return integer_zero_node;
9289 }
9290
9291 /* If the second arg is "", return *(const unsigned char*)arg1. */
9292 if (p2 && *p2 == '\0')
9293 {
9294 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9295 tree cst_uchar_ptr_node
9296 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9297
9298 return fold_convert_loc (loc, integer_type_node,
9299 build1 (INDIRECT_REF, cst_uchar_node,
9300 fold_convert_loc (loc,
9301 cst_uchar_ptr_node,
9302 arg1)));
9303 }
9304
9305 /* If the first arg is "", return -*(const unsigned char*)arg2. */
9306 if (p1 && *p1 == '\0')
9307 {
9308 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9309 tree cst_uchar_ptr_node
9310 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9311
9312 tree temp
9313 = fold_convert_loc (loc, integer_type_node,
9314 build1 (INDIRECT_REF, cst_uchar_node,
9315 fold_convert_loc (loc,
9316 cst_uchar_ptr_node,
9317 arg2)));
9318 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
9319 }
9320
9321 return NULL_TREE;
9322 }
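
/* For example, a comparison against an empty string reduces to a
   single byte load:

     strcmp (s, "")   ->    (int) *(const unsigned char *) s
     strcmp ("", s)   ->   -(int) *(const unsigned char *) s

   The loads use unsigned char because strcmp is specified to
   compare characters as unsigned char.  */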
9323
9324 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
9325 Return NULL_TREE if no simplification can be made. */
9326
9327 static tree
9328 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
9329 {
9330 const char *p1, *p2;
9331
9332 if (!validate_arg (arg1, POINTER_TYPE)
9333 || !validate_arg (arg2, POINTER_TYPE)
9334 || !validate_arg (len, INTEGER_TYPE))
9335 return NULL_TREE;
9336
9337 /* If the LEN parameter is zero, return zero. */
9338 if (integer_zerop (len))
9339 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
9340 arg1, arg2);
9341
9342 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9343 if (operand_equal_p (arg1, arg2, 0))
9344 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
9345
9346 p1 = c_getstr (arg1);
9347 p2 = c_getstr (arg2);
9348
9349 if (tree_fits_uhwi_p (len) && p1 && p2)
9350 {
9351 const int i = strncmp (p1, p2, tree_to_uhwi (len));
9352 if (i > 0)
9353 return integer_one_node;
9354 else if (i < 0)
9355 return integer_minus_one_node;
9356 else
9357 return integer_zero_node;
9358 }
9359
9360 /* If the second arg is "", and the length is greater than zero,
9361 return *(const unsigned char*)arg1. */
9362 if (p2 && *p2 == '\0'
9363 && TREE_CODE (len) == INTEGER_CST
9364 && tree_int_cst_sgn (len) == 1)
9365 {
9366 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9367 tree cst_uchar_ptr_node
9368 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9369
9370 return fold_convert_loc (loc, integer_type_node,
9371 build1 (INDIRECT_REF, cst_uchar_node,
9372 fold_convert_loc (loc,
9373 cst_uchar_ptr_node,
9374 arg1)));
9375 }
9376
9377 /* If the first arg is "", and the length is greater than zero,
9378 return -*(const unsigned char*)arg2. */
9379 if (p1 && *p1 == '\0'
9380 && TREE_CODE (len) == INTEGER_CST
9381 && tree_int_cst_sgn (len) == 1)
9382 {
9383 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9384 tree cst_uchar_ptr_node
9385 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9386
9387 tree temp = fold_convert_loc (loc, integer_type_node,
9388 build1 (INDIRECT_REF, cst_uchar_node,
9389 fold_convert_loc (loc,
9390 cst_uchar_ptr_node,
9391 arg2)));
9392 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
9393 }
9394
9395 /* If the LEN parameter is one, return an expression corresponding to
9396 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
9397 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
9398 {
9399 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9400 tree cst_uchar_ptr_node
9401 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9402
9403 tree ind1 = fold_convert_loc (loc, integer_type_node,
9404 build1 (INDIRECT_REF, cst_uchar_node,
9405 fold_convert_loc (loc,
9406 cst_uchar_ptr_node,
9407 arg1)));
9408 tree ind2 = fold_convert_loc (loc, integer_type_node,
9409 build1 (INDIRECT_REF, cst_uchar_node,
9410 fold_convert_loc (loc,
9411 cst_uchar_ptr_node,
9412 arg2)));
9413 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9414 }
9415
9416 return NULL_TREE;
9417 }
9418
9419 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9420 ARG. Return NULL_TREE if no simplification can be made. */
9421
9422 static tree
9423 fold_builtin_signbit (location_t loc, tree arg, tree type)
9424 {
9425 if (!validate_arg (arg, REAL_TYPE))
9426 return NULL_TREE;
9427
9428 /* If ARG is a compile-time constant, determine the result. */
9429 if (TREE_CODE (arg) == REAL_CST
9430 && !TREE_OVERFLOW (arg))
9431 {
9432 REAL_VALUE_TYPE c;
9433
9434 c = TREE_REAL_CST (arg);
9435 return (REAL_VALUE_NEGATIVE (c)
9436 ? build_one_cst (type)
9437 : build_zero_cst (type));
9438 }
9439
9440 /* If ARG is non-negative, the result is always zero. */
9441 if (tree_expr_nonnegative_p (arg))
9442 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9443
9444 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9445 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9446 return fold_convert (type,
9447 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
9448 build_real (TREE_TYPE (arg), dconst0)));
9449
9450 return NULL_TREE;
9451 }
9452
9453 /* Fold function call to builtin copysign, copysignf or copysignl with
9454 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9455 be made. */
9456
9457 static tree
9458 fold_builtin_copysign (location_t loc, tree fndecl,
9459 tree arg1, tree arg2, tree type)
9460 {
9461 tree tem;
9462
9463 if (!validate_arg (arg1, REAL_TYPE)
9464 || !validate_arg (arg2, REAL_TYPE))
9465 return NULL_TREE;
9466
9467 /* copysign(X,X) is X. */
9468 if (operand_equal_p (arg1, arg2, 0))
9469 return fold_convert_loc (loc, type, arg1);
9470
9471 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9472 if (TREE_CODE (arg1) == REAL_CST
9473 && TREE_CODE (arg2) == REAL_CST
9474 && !TREE_OVERFLOW (arg1)
9475 && !TREE_OVERFLOW (arg2))
9476 {
9477 REAL_VALUE_TYPE c1, c2;
9478
9479 c1 = TREE_REAL_CST (arg1);
9480 c2 = TREE_REAL_CST (arg2);
9481 /* c1.sign := c2.sign. */
9482 real_copysign (&c1, &c2);
9483 return build_real (type, c1);
9484 }
9485
9486 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9487 Remember to evaluate Y for side-effects. */
9488 if (tree_expr_nonnegative_p (arg2))
9489 return omit_one_operand_loc (loc, type,
9490 fold_build1_loc (loc, ABS_EXPR, type, arg1),
9491 arg2);
9492
9493 /* Strip sign changing operations for the first argument. */
9494 tem = fold_strip_sign_ops (arg1);
9495 if (tem)
9496 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
9497
9498 return NULL_TREE;
9499 }
9500
9501 /* Fold a call to builtin isascii with argument ARG. */
9502
9503 static tree
9504 fold_builtin_isascii (location_t loc, tree arg)
9505 {
9506 if (!validate_arg (arg, INTEGER_TYPE))
9507 return NULL_TREE;
9508 else
9509 {
9510 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9511 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9512 build_int_cst (integer_type_node,
9513 ~ (unsigned HOST_WIDE_INT) 0x7f));
9514 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9515 arg, integer_zero_node);
9516 }
9517 }
9518
9519 /* Fold a call to builtin toascii with argument ARG. */
9520
9521 static tree
9522 fold_builtin_toascii (location_t loc, tree arg)
9523 {
9524 if (!validate_arg (arg, INTEGER_TYPE))
9525 return NULL_TREE;
9526
9527 /* Transform toascii(c) -> (c & 0x7f). */
9528 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9529 build_int_cst (integer_type_node, 0x7f));
9530 }
9531
9532 /* Fold a call to builtin isdigit with argument ARG. */
9533
9534 static tree
9535 fold_builtin_isdigit (location_t loc, tree arg)
9536 {
9537 if (!validate_arg (arg, INTEGER_TYPE))
9538 return NULL_TREE;
9539 else
9540 {
9541 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9542 /* According to the C standard, isdigit is unaffected by locale.
9543 However, it definitely is affected by the target character set. */
9544 unsigned HOST_WIDE_INT target_digit0
9545 = lang_hooks.to_target_charset ('0');
9546
9547 if (target_digit0 == 0)
9548 return NULL_TREE;
9549
9550 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9551 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9552 build_int_cst (unsigned_type_node, target_digit0));
9553 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9554 build_int_cst (unsigned_type_node, 9));
9555 }
9556 }
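
/* The unsigned subtraction above needs only one comparison: for C
   below '0' the subtraction wraps to a huge unsigned value.  E.g.,
   assuming an ASCII target character set:

     isdigit ('5')   ->   (unsigned) '5' - '0' <= 9   ->   5 <= 9  ->  1
     isdigit (' ')   ->   (unsigned) ' ' - '0' <= 9   ->   wraps   ->  0  */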
9557
9558 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9559
9560 static tree
9561 fold_builtin_fabs (location_t loc, tree arg, tree type)
9562 {
9563 if (!validate_arg (arg, REAL_TYPE))
9564 return NULL_TREE;
9565
9566 arg = fold_convert_loc (loc, type, arg);
9567 if (TREE_CODE (arg) == REAL_CST)
9568 return fold_abs_const (arg, type);
9569 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9570 }
9571
9572 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9573
9574 static tree
9575 fold_builtin_abs (location_t loc, tree arg, tree type)
9576 {
9577 if (!validate_arg (arg, INTEGER_TYPE))
9578 return NULL_TREE;
9579
9580 arg = fold_convert_loc (loc, type, arg);
9581 if (TREE_CODE (arg) == INTEGER_CST)
9582 return fold_abs_const (arg, type);
9583 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9584 }
9585
9586 /* Fold a fma operation with arguments ARG[012]. */
9587
9588 tree
9589 fold_fma (location_t loc ATTRIBUTE_UNUSED,
9590 tree type, tree arg0, tree arg1, tree arg2)
9591 {
9592 if (TREE_CODE (arg0) == REAL_CST
9593 && TREE_CODE (arg1) == REAL_CST
9594 && TREE_CODE (arg2) == REAL_CST)
9595 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
9596
9597 return NULL_TREE;
9598 }
9599
9600 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
9601
9602 static tree
9603 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
9604 {
9605 if (validate_arg (arg0, REAL_TYPE)
9606 && validate_arg (arg1, REAL_TYPE)
9607 && validate_arg (arg2, REAL_TYPE))
9608 {
9609 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
9610 if (tem)
9611 return tem;
9612
9613 /* ??? Only expand to FMA_EXPR if it's directly supported. */
9614 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
9615 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
9616 }
9617 return NULL_TREE;
9618 }
9619
9620 /* Fold a call to builtin fmin or fmax. */
9621
9622 static tree
9623 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
9624 tree type, bool max)
9625 {
9626 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9627 {
9628 /* Calculate the result when the argument is a constant. */
9629 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9630
9631 if (res)
9632 return res;
9633
9634 /* If either argument is NaN, return the other one. Avoid the
9635 transformation if we get (and honor) a signalling NaN. Using
9636 omit_one_operand() ensures we create a non-lvalue. */
9637 if (TREE_CODE (arg0) == REAL_CST
9638 && real_isnan (&TREE_REAL_CST (arg0))
9639 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9640 || ! TREE_REAL_CST (arg0).signalling))
9641 return omit_one_operand_loc (loc, type, arg1, arg0);
9642 if (TREE_CODE (arg1) == REAL_CST
9643 && real_isnan (&TREE_REAL_CST (arg1))
9644 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9645 || ! TREE_REAL_CST (arg1).signalling))
9646 return omit_one_operand_loc (loc, type, arg0, arg1);
9647
9648 /* Transform fmin/fmax(x,x) -> x. */
9649 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9650 return omit_one_operand_loc (loc, type, arg0, arg1);
9651
9652 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9653 functions to return the numeric arg if the other one is NaN.
9654 These tree codes don't honor that, so only transform if
9655 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9656 handled, so we don't have to worry about it either. */
9657 if (flag_finite_math_only)
9658 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9659 fold_convert_loc (loc, type, arg0),
9660 fold_convert_loc (loc, type, arg1));
9661 }
9662 return NULL_TREE;
9663 }
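
/* Examples of the folds above:

     fmax (x, __builtin_nan (""))   ->   x  (quiet NaN dropped)
     fmin (x, x)                    ->   x
     fmax (a, b)                    ->   MAX_EXPR <a, b>, but only
                                         with -ffinite-math-only  */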
9664
9665 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9666
9667 static tree
9668 fold_builtin_carg (location_t loc, tree arg, tree type)
9669 {
9670 if (validate_arg (arg, COMPLEX_TYPE)
9671 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9672 {
9673 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9674
9675 if (atan2_fn)
9676 {
9677 tree new_arg = builtin_save_expr (arg);
9678 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9679 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9680 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9681 }
9682 }
9683
9684 return NULL_TREE;
9685 }
9686
9687 /* Fold a call to builtin logb/ilogb. */
9688
9689 static tree
9690 fold_builtin_logb (location_t loc, tree arg, tree rettype)
9691 {
9692 if (! validate_arg (arg, REAL_TYPE))
9693 return NULL_TREE;
9694
9695 STRIP_NOPS (arg);
9696
9697 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9698 {
9699 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9700
9701 switch (value->cl)
9702 {
9703 case rvc_nan:
9704 case rvc_inf:
9705 /* If arg is Inf or NaN and we're logb, return it. */
9706 if (TREE_CODE (rettype) == REAL_TYPE)
9707 {
9708 /* For logb(-Inf) we have to return +Inf. */
9709 if (real_isinf (value) && real_isneg (value))
9710 {
9711 REAL_VALUE_TYPE tem;
9712 real_inf (&tem);
9713 return build_real (rettype, tem);
9714 }
9715 return fold_convert_loc (loc, rettype, arg);
9716 }
9717 /* Fall through... */
9718 case rvc_zero:
9719 /* Zero may set errno and/or raise an exception for logb, also
9720 for ilogb we don't know FP_ILOGB0. */
9721 return NULL_TREE;
9722 case rvc_normal:
9723 /* For normal numbers, proceed iff radix == 2. In GCC,
9724 normalized significands are in the range [0.5, 1.0). We
9725 want the exponent as if they were [1.0, 2.0) so get the
9726 exponent and subtract 1. */
9727 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9728 return fold_convert_loc (loc, rettype,
9729 build_int_cst (integer_type_node,
9730 REAL_EXP (value)-1));
9731 break;
9732 }
9733 }
9734
9735 return NULL_TREE;
9736 }
9737
9738 /* Fold a call to builtin significand, if radix == 2. */
9739
9740 static tree
9741 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9742 {
9743 if (! validate_arg (arg, REAL_TYPE))
9744 return NULL_TREE;
9745
9746 STRIP_NOPS (arg);
9747
9748 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9749 {
9750 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9751
9752 switch (value->cl)
9753 {
9754 case rvc_zero:
9755 case rvc_nan:
9756 case rvc_inf:
9757 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9758 return fold_convert_loc (loc, rettype, arg);
9759 case rvc_normal:
9760 /* For normal numbers, proceed iff radix == 2. */
9761 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9762 {
9763 REAL_VALUE_TYPE result = *value;
9764 /* In GCC, normalized significands are in the range [0.5,
9765 1.0). We want them to be [1.0, 2.0) so set the
9766 exponent to 1. */
9767 SET_REAL_EXP (&result, 1);
9768 return build_real (rettype, result);
9769 }
9770 break;
9771 }
9772 }
9773
9774 return NULL_TREE;
9775 }
9776
9777 /* Fold a call to builtin frexp, we can assume the base is 2. */
9778
9779 static tree
9780 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9781 {
9782 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9783 return NULL_TREE;
9784
9785 STRIP_NOPS (arg0);
9786
9787 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9788 return NULL_TREE;
9789
9790 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9791
9792 /* Proceed if a valid pointer type was passed in. */
9793 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9794 {
9795 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9796 tree frac, exp;
9797
9798 switch (value->cl)
9799 {
9800 case rvc_zero:
9801 /* For +-0, return (*exp = 0, +-0). */
9802 exp = integer_zero_node;
9803 frac = arg0;
9804 break;
9805 case rvc_nan:
9806 case rvc_inf:
9807 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9808 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9809 case rvc_normal:
9810 {
9811 /* Since the frexp function always expects base 2, and in
9812 GCC normalized significands are already in the range
9813 [0.5, 1.0), we have exactly what frexp wants. */
9814 REAL_VALUE_TYPE frac_rvt = *value;
9815 SET_REAL_EXP (&frac_rvt, 0);
9816 frac = build_real (rettype, frac_rvt);
9817 exp = build_int_cst (integer_type_node, REAL_EXP (value));
9818 }
9819 break;
9820 default:
9821 gcc_unreachable ();
9822 }
9823
9824 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9825 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9826 TREE_SIDE_EFFECTS (arg1) = 1;
9827 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9828 }
9829
9830 return NULL_TREE;
9831 }
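
/* For example, since GCC keeps significands in [0.5, 1.0):

     frexp (8.0, &e);   ->   *e = 4, 0.5;

   8.0 is represented as 0.5 * 2**4, which is exactly the
   fraction/exponent split frexp is specified to produce.  */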
9832
9833 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9834 then we can assume the base is two. If it's false, then we have to
9835 check the mode of the TYPE parameter in certain cases. */
9836
9837 static tree
9838 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9839 tree type, bool ldexp)
9840 {
9841 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9842 {
9843 STRIP_NOPS (arg0);
9844 STRIP_NOPS (arg1);
9845
9846 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9847 if (real_zerop (arg0) || integer_zerop (arg1)
9848 || (TREE_CODE (arg0) == REAL_CST
9849 && !real_isfinite (&TREE_REAL_CST (arg0))))
9850 return omit_one_operand_loc (loc, type, arg0, arg1);
9851
9852 /* If both arguments are constant, then try to evaluate it. */
9853 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9854 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9855 && tree_fits_shwi_p (arg1))
9856 {
9857 /* Bound the maximum adjustment to twice the range of the
9858 mode's valid exponents. Use abs to ensure the range is
9859 positive as a sanity check. */
9860 const long max_exp_adj = 2 *
9861 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9862 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9863
9864 /* Get the user-requested adjustment. */
9865 const HOST_WIDE_INT req_exp_adj = tree_to_shwi (arg1);
9866
9867 /* The requested adjustment must be inside this range. This
9868 is a preliminary cap to avoid things like overflow; we
9869 may still fail to compute the result for other reasons. */
9870 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9871 {
9872 REAL_VALUE_TYPE initial_result;
9873
9874 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9875
9876 /* Ensure we didn't overflow. */
9877 if (! real_isinf (&initial_result))
9878 {
9879 const REAL_VALUE_TYPE trunc_result
9880 = real_value_truncate (TYPE_MODE (type), initial_result);
9881
9882 /* Only proceed if the target mode can hold the
9883 resulting value. */
9884 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9885 return build_real (type, trunc_result);
9886 }
9887 }
9888 }
9889 }
9890
9891 return NULL_TREE;
9892 }
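
/* For example, with both operands constant and in range:

     ldexp (1.5, 4);   ->   24.0

   The 2 * (emax - emin) cap is only a coarse guard against absurd
   adjustments; real_ldexp and the truncation check still decide
   whether the exact result fits the target mode.  */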
9893
9894 /* Fold a call to builtin modf. */
9895
9896 static tree
9897 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9898 {
9899 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9900 return NULL_TREE;
9901
9902 STRIP_NOPS (arg0);
9903
9904 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9905 return NULL_TREE;
9906
9907 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9908
9909 /* Proceed if a valid pointer type was passed in. */
9910 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9911 {
9912 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9913 REAL_VALUE_TYPE trunc, frac;
9914
9915 switch (value->cl)
9916 {
9917 case rvc_nan:
9918 case rvc_zero:
9919 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9920 trunc = frac = *value;
9921 break;
9922 case rvc_inf:
9923 /* For +-Inf, return (*arg1 = arg0, +-0). */
9924 frac = dconst0;
9925 frac.sign = value->sign;
9926 trunc = *value;
9927 break;
9928 case rvc_normal:
9929 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9930 real_trunc (&trunc, VOIDmode, value);
9931 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9932 /* If the original number was negative and already
9933 integral, then the fractional part is -0.0. */
9934 if (value->sign && frac.cl == rvc_zero)
9935 frac.sign = value->sign;
9936 break;
9937 }
9938
9939 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9940 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9941 build_real (rettype, trunc));
9942 TREE_SIDE_EFFECTS (arg1) = 1;
9943 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9944 build_real (rettype, frac));
9945 }
9946
9947 return NULL_TREE;
9948 }
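
/* For example:

     modf (-3.5, &ip);   ->   *ip = -3.0, -0.5;
     modf (-2.0, &ip);   ->   *ip = -2.0, -0.0;

   the second case showing the sign-preserving -0.0 fraction for a
   negative input that is already integral.  */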
9949
9950 /* Given a location LOC, an interclass builtin function decl FNDECL
9951 and its single argument ARG, return a folded expression computing
9952 the same, or NULL_TREE if we either couldn't or didn't want to fold
9953 (the latter happens if there's an RTL instruction available). */
9954
9955 static tree
9956 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9957 {
9958 enum machine_mode mode;
9959
9960 if (!validate_arg (arg, REAL_TYPE))
9961 return NULL_TREE;
9962
9963 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9964 return NULL_TREE;
9965
9966 mode = TYPE_MODE (TREE_TYPE (arg));
9967
9968 /* If there is no optab, try generic code. */
9969 switch (DECL_FUNCTION_CODE (fndecl))
9970 {
9971 tree result;
9972
9973 CASE_FLT_FN (BUILT_IN_ISINF):
9974 {
9975 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9976 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9977 tree const type = TREE_TYPE (arg);
9978 REAL_VALUE_TYPE r;
9979 char buf[128];
9980
9981 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9982 real_from_string (&r, buf);
9983 result = build_call_expr (isgr_fn, 2,
9984 fold_build1_loc (loc, ABS_EXPR, type, arg),
9985 build_real (type, r));
9986 return result;
9987 }
9988 CASE_FLT_FN (BUILT_IN_FINITE):
9989 case BUILT_IN_ISFINITE:
9990 {
9991 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9992 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9993 tree const type = TREE_TYPE (arg);
9994 REAL_VALUE_TYPE r;
9995 char buf[128];
9996
9997 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9998 real_from_string (&r, buf);
9999 result = build_call_expr (isle_fn, 2,
10000 fold_build1_loc (loc, ABS_EXPR, type, arg),
10001 build_real (type, r));
10002 /*result = fold_build2_loc (loc, UNGT_EXPR,
10003 TREE_TYPE (TREE_TYPE (fndecl)),
10004 fold_build1_loc (loc, ABS_EXPR, type, arg),
10005 build_real (type, r));
10006 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
10007 TREE_TYPE (TREE_TYPE (fndecl)),
10008 result);*/
10009 return result;
10010 }
10011 case BUILT_IN_ISNORMAL:
10012 {
10013 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
10014 islessequal(fabs(x),DBL_MAX). */
10015 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
10016 tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
10017 tree const type = TREE_TYPE (arg);
10018 REAL_VALUE_TYPE rmax, rmin;
10019 char buf[128];
10020
10021 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
10022 real_from_string (&rmax, buf);
10023 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
10024 real_from_string (&rmin, buf);
10025 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
10026 result = build_call_expr (isle_fn, 2, arg,
10027 build_real (type, rmax));
10028 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
10029 build_call_expr (isge_fn, 2, arg,
10030 build_real (type, rmin)));
10031 return result;
10032 }
10033 default:
10034 break;
10035 }
10036
10037 return NULL_TREE;
10038 }
10039
10040 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
10041 ARG is the argument for the call. */
10042
10043 static tree
10044 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
10045 {
10046 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10047 REAL_VALUE_TYPE r;
10048
10049 if (!validate_arg (arg, REAL_TYPE))
10050 return NULL_TREE;
10051
10052 switch (builtin_index)
10053 {
10054 case BUILT_IN_ISINF:
10055 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
10056 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
10057
10058 if (TREE_CODE (arg) == REAL_CST)
10059 {
10060 r = TREE_REAL_CST (arg);
10061 if (real_isinf (&r))
10062 return real_compare (GT_EXPR, &r, &dconst0)
10063 ? integer_one_node : integer_minus_one_node;
10064 else
10065 return integer_zero_node;
10066 }
10067
10068 return NULL_TREE;
10069
10070 case BUILT_IN_ISINF_SIGN:
10071 {
10072 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
10073 /* In a boolean context, GCC will fold the inner COND_EXPR to
10074 1. So e.g. "if (isinf_sign(x))" would be folded to just
10075 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
10076 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
10077 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
10078 tree tmp = NULL_TREE;
10079
10080 arg = builtin_save_expr (arg);
10081
10082 if (signbit_fn && isinf_fn)
10083 {
10084 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
10085 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
10086
10087 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
10088 signbit_call, integer_zero_node);
10089 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
10090 isinf_call, integer_zero_node);
10091
10092 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
10093 integer_minus_one_node, integer_one_node);
10094 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
10095 isinf_call, tmp,
10096 integer_zero_node);
10097 }
10098
10099 return tmp;
10100 }
10101
10102 case BUILT_IN_ISFINITE:
10103 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
10104 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
10105 return omit_one_operand_loc (loc, type, integer_one_node, arg);
10106
10107 if (TREE_CODE (arg) == REAL_CST)
10108 {
10109 r = TREE_REAL_CST (arg);
10110 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
10111 }
10112
10113 return NULL_TREE;
10114
10115 case BUILT_IN_ISNAN:
10116 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
10117 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
10118
10119 if (TREE_CODE (arg) == REAL_CST)
10120 {
10121 r = TREE_REAL_CST (arg);
10122 return real_isnan (&r) ? integer_one_node : integer_zero_node;
10123 }
10124
10125 arg = builtin_save_expr (arg);
10126 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
10127
10128 default:
10129 gcc_unreachable ();
10130 }
10131 }
10132
10133 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
10134 This builtin will generate code to return the appropriate floating
10135 point classification depending on the value of the floating point
10136 number passed in. The possible return values must be supplied as
10137 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
10138 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
10139 one floating point argument which is "type generic". */
10140
10141 static tree
10142 fold_builtin_fpclassify (location_t loc, tree exp)
10143 {
10144 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
10145 arg, type, res, tmp;
10146 enum machine_mode mode;
10147 REAL_VALUE_TYPE r;
10148 char buf[128];
10149
10150 /* Verify the required arguments in the original call. */
10151 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
10152 INTEGER_TYPE, INTEGER_TYPE,
10153 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
10154 return NULL_TREE;
10155
10156 fp_nan = CALL_EXPR_ARG (exp, 0);
10157 fp_infinite = CALL_EXPR_ARG (exp, 1);
10158 fp_normal = CALL_EXPR_ARG (exp, 2);
10159 fp_subnormal = CALL_EXPR_ARG (exp, 3);
10160 fp_zero = CALL_EXPR_ARG (exp, 4);
10161 arg = CALL_EXPR_ARG (exp, 5);
10162 type = TREE_TYPE (arg);
10163 mode = TYPE_MODE (type);
10164 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
10165
10166 /* fpclassify(x) ->
10167 isnan(x) ? FP_NAN :
10168 (fabs(x) == Inf ? FP_INFINITE :
10169 (fabs(x) >= DBL_MIN ? FP_NORMAL :
10170 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
10171
10172 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
10173 build_real (type, dconst0));
10174 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
10175 tmp, fp_zero, fp_subnormal);
10176
10177 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
10178 real_from_string (&r, buf);
10179 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
10180 arg, build_real (type, r));
10181 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
10182
10183 if (HONOR_INFINITIES (mode))
10184 {
10185 real_inf (&r);
10186 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
10187 build_real (type, r));
10188 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
10189 fp_infinite, res);
10190 }
10191
10192 if (HONOR_NANS (mode))
10193 {
10194 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
10195 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
10196 }
10197
10198 return res;
10199 }
10200
10201 /* Fold a call to an unordered comparison function such as
10202 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
10203 being called and ARG0 and ARG1 are the arguments for the call.
10204 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
10205 the opposite of the desired result. UNORDERED_CODE is used
10206 for modes that can hold NaNs and ORDERED_CODE is used for
10207 the rest. */
10208
10209 static tree
10210 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
10211 enum tree_code unordered_code,
10212 enum tree_code ordered_code)
10213 {
10214 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10215 enum tree_code code;
10216 tree type0, type1;
10217 enum tree_code code0, code1;
10218 tree cmp_type = NULL_TREE;
10219
10220 type0 = TREE_TYPE (arg0);
10221 type1 = TREE_TYPE (arg1);
10222
10223 code0 = TREE_CODE (type0);
10224 code1 = TREE_CODE (type1);
10225
10226 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
10227 /* Choose the wider of two real types. */
10228 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
10229 ? type0 : type1;
10230 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
10231 cmp_type = type0;
10232 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
10233 cmp_type = type1;
10234
10235 arg0 = fold_convert_loc (loc, cmp_type, arg0);
10236 arg1 = fold_convert_loc (loc, cmp_type, arg1);
10237
10238 if (unordered_code == UNORDERED_EXPR)
10239 {
10240 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
10241 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
10242 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
10243 }
10244
10245 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
10246 : ordered_code;
10247 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
10248 fold_build2_loc (loc, code, type, arg0, arg1));
10249 }
10250
10251 /* Fold a call to built-in function FNDECL with 0 arguments.
10252 IGNORE is true if the result of the function call is ignored. This
10253 function returns NULL_TREE if no simplification was possible. */
10254
10255 static tree
10256 fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
10257 {
10258 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10259 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10260 switch (fcode)
10261 {
10262 CASE_FLT_FN (BUILT_IN_INF):
10263 case BUILT_IN_INFD32:
10264 case BUILT_IN_INFD64:
10265 case BUILT_IN_INFD128:
10266 return fold_builtin_inf (loc, type, true);
10267
10268 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
10269 return fold_builtin_inf (loc, type, false);
10270
10271 case BUILT_IN_CLASSIFY_TYPE:
10272 return fold_builtin_classify_type (NULL_TREE);
10273
10274 case BUILT_IN_UNREACHABLE:
10275 if (flag_sanitize & SANITIZE_UNREACHABLE
10276 && (current_function_decl == NULL
10277 || !lookup_attribute ("no_sanitize_undefined",
10278 DECL_ATTRIBUTES (current_function_decl))))
10279 return ubsan_instrument_unreachable (loc);
10280 break;
10281
10282 default:
10283 break;
10284 }
10285 return NULL_TREE;
10286 }
10287
10288 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
10289 IGNORE is true if the result of the function call is ignored. This
10290 function returns NULL_TREE if no simplification was possible. */
10291
10292 static tree
10293 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
10294 {
10295 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10296 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10297 switch (fcode)
10298 {
10299 case BUILT_IN_CONSTANT_P:
10300 {
10301 tree val = fold_builtin_constant_p (arg0);
10302
10303 /* Gimplification will pull the CALL_EXPR for the builtin out of
10304 an if condition. When not optimizing, we'll not CSE it back.
10305 To avoid link-error regressions, return false now. */
10306 if (!val && !optimize)
10307 val = integer_zero_node;
10308
10309 return val;
10310 }
10311
10312 case BUILT_IN_CLASSIFY_TYPE:
10313 return fold_builtin_classify_type (arg0);
10314
10315 case BUILT_IN_STRLEN:
10316 return fold_builtin_strlen (loc, type, arg0);
10317
10318 CASE_FLT_FN (BUILT_IN_FABS):
10319 case BUILT_IN_FABSD32:
10320 case BUILT_IN_FABSD64:
10321 case BUILT_IN_FABSD128:
10322 return fold_builtin_fabs (loc, arg0, type);
10323
10324 case BUILT_IN_ABS:
10325 case BUILT_IN_LABS:
10326 case BUILT_IN_LLABS:
10327 case BUILT_IN_IMAXABS:
10328 return fold_builtin_abs (loc, arg0, type);
10329
10330 CASE_FLT_FN (BUILT_IN_CONJ):
10331 if (validate_arg (arg0, COMPLEX_TYPE)
10332 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10333 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
10334 break;
10335
10336 CASE_FLT_FN (BUILT_IN_CREAL):
10337 if (validate_arg (arg0, COMPLEX_TYPE)
10338 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10339 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
10340 break;
10341
10342 CASE_FLT_FN (BUILT_IN_CIMAG):
10343 if (validate_arg (arg0, COMPLEX_TYPE)
10344 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10345 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
10346 break;
10347
10348 CASE_FLT_FN (BUILT_IN_CCOS):
10349 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ false);
10350
10351 CASE_FLT_FN (BUILT_IN_CCOSH):
10352 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ true);
10353
10354 CASE_FLT_FN (BUILT_IN_CPROJ):
10355 return fold_builtin_cproj (loc, arg0, type);
10356
10357 CASE_FLT_FN (BUILT_IN_CSIN):
10358 if (validate_arg (arg0, COMPLEX_TYPE)
10359 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10360 return do_mpc_arg1 (arg0, type, mpc_sin);
10361 break;
10362
10363 CASE_FLT_FN (BUILT_IN_CSINH):
10364 if (validate_arg (arg0, COMPLEX_TYPE)
10365 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10366 return do_mpc_arg1 (arg0, type, mpc_sinh);
10367 break;
10368
10369 CASE_FLT_FN (BUILT_IN_CTAN):
10370 if (validate_arg (arg0, COMPLEX_TYPE)
10371 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10372 return do_mpc_arg1 (arg0, type, mpc_tan);
10373 break;
10374
10375 CASE_FLT_FN (BUILT_IN_CTANH):
10376 if (validate_arg (arg0, COMPLEX_TYPE)
10377 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10378 return do_mpc_arg1 (arg0, type, mpc_tanh);
10379 break;
10380
10381 CASE_FLT_FN (BUILT_IN_CLOG):
10382 if (validate_arg (arg0, COMPLEX_TYPE)
10383 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10384 return do_mpc_arg1 (arg0, type, mpc_log);
10385 break;
10386
10387 CASE_FLT_FN (BUILT_IN_CSQRT):
10388 if (validate_arg (arg0, COMPLEX_TYPE)
10389 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10390 return do_mpc_arg1 (arg0, type, mpc_sqrt);
10391 break;
10392
10393 CASE_FLT_FN (BUILT_IN_CASIN):
10394 if (validate_arg (arg0, COMPLEX_TYPE)
10395 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10396 return do_mpc_arg1 (arg0, type, mpc_asin);
10397 break;
10398
10399 CASE_FLT_FN (BUILT_IN_CACOS):
10400 if (validate_arg (arg0, COMPLEX_TYPE)
10401 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10402 return do_mpc_arg1 (arg0, type, mpc_acos);
10403 break;
10404
10405 CASE_FLT_FN (BUILT_IN_CATAN):
10406 if (validate_arg (arg0, COMPLEX_TYPE)
10407 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10408 return do_mpc_arg1 (arg0, type, mpc_atan);
10409 break;
10410
10411 CASE_FLT_FN (BUILT_IN_CASINH):
10412 if (validate_arg (arg0, COMPLEX_TYPE)
10413 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10414 return do_mpc_arg1 (arg0, type, mpc_asinh);
10415 break;
10416
10417 CASE_FLT_FN (BUILT_IN_CACOSH):
10418 if (validate_arg (arg0, COMPLEX_TYPE)
10419 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10420 return do_mpc_arg1 (arg0, type, mpc_acosh);
10421 break;
10422
10423 CASE_FLT_FN (BUILT_IN_CATANH):
10424 if (validate_arg (arg0, COMPLEX_TYPE)
10425 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10426 return do_mpc_arg1 (arg0, type, mpc_atanh);
10427 break;
10428
10429 CASE_FLT_FN (BUILT_IN_CABS):
10430 return fold_builtin_cabs (loc, arg0, type, fndecl);
10431
10432 CASE_FLT_FN (BUILT_IN_CARG):
10433 return fold_builtin_carg (loc, arg0, type);
10434
10435 CASE_FLT_FN (BUILT_IN_SQRT):
10436 return fold_builtin_sqrt (loc, arg0, type);
10437
10438 CASE_FLT_FN (BUILT_IN_CBRT):
10439 return fold_builtin_cbrt (loc, arg0, type);
10440
10441 CASE_FLT_FN (BUILT_IN_ASIN):
10442 if (validate_arg (arg0, REAL_TYPE))
10443 return do_mpfr_arg1 (arg0, type, mpfr_asin,
10444 &dconstm1, &dconst1, true);
10445 break;
10446
10447 CASE_FLT_FN (BUILT_IN_ACOS):
10448 if (validate_arg (arg0, REAL_TYPE))
10449 return do_mpfr_arg1 (arg0, type, mpfr_acos,
10450 &dconstm1, &dconst1, true);
10451 break;
10452
10453 CASE_FLT_FN (BUILT_IN_ATAN):
10454 if (validate_arg (arg0, REAL_TYPE))
10455 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
10456 break;
10457
10458 CASE_FLT_FN (BUILT_IN_ASINH):
10459 if (validate_arg (arg0, REAL_TYPE))
10460 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10461 break;
10462
10463 CASE_FLT_FN (BUILT_IN_ACOSH):
10464 if (validate_arg (arg0, REAL_TYPE))
10465 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10466 &dconst1, NULL, true);
10467 break;
10468
10469 CASE_FLT_FN (BUILT_IN_ATANH):
10470 if (validate_arg (arg0, REAL_TYPE))
10471 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10472 &dconstm1, &dconst1, false);
10473 break;
10474
10475 CASE_FLT_FN (BUILT_IN_SIN):
10476 if (validate_arg (arg0, REAL_TYPE))
10477 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10478 break;
10479
10480 CASE_FLT_FN (BUILT_IN_COS):
10481 return fold_builtin_cos (loc, arg0, type, fndecl);
10482
10483 CASE_FLT_FN (BUILT_IN_TAN):
10484 return fold_builtin_tan (arg0, type);
10485
10486 CASE_FLT_FN (BUILT_IN_CEXP):
10487 return fold_builtin_cexp (loc, arg0, type);
10488
10489 CASE_FLT_FN (BUILT_IN_CEXPI):
10490 if (validate_arg (arg0, REAL_TYPE))
10491 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10492 break;
10493
10494 CASE_FLT_FN (BUILT_IN_SINH):
10495 if (validate_arg (arg0, REAL_TYPE))
10496 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10497 break;
10498
10499 CASE_FLT_FN (BUILT_IN_COSH):
10500 return fold_builtin_cosh (loc, arg0, type, fndecl);
10501
10502 CASE_FLT_FN (BUILT_IN_TANH):
10503 if (validate_arg (arg0, REAL_TYPE))
10504 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10505 break;
10506
10507 CASE_FLT_FN (BUILT_IN_ERF):
10508 if (validate_arg (arg0, REAL_TYPE))
10509 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10510 break;
10511
10512 CASE_FLT_FN (BUILT_IN_ERFC):
10513 if (validate_arg (arg0, REAL_TYPE))
10514 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10515 break;
10516
10517 CASE_FLT_FN (BUILT_IN_TGAMMA):
10518 if (validate_arg (arg0, REAL_TYPE))
10519 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10520 break;
10521
10522 CASE_FLT_FN (BUILT_IN_EXP):
10523 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
10524
10525 CASE_FLT_FN (BUILT_IN_EXP2):
10526 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
10527
10528 CASE_FLT_FN (BUILT_IN_EXP10):
10529 CASE_FLT_FN (BUILT_IN_POW10):
10530 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
10531
10532 CASE_FLT_FN (BUILT_IN_EXPM1):
10533 if (validate_arg (arg0, REAL_TYPE))
10534 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10535 break;
10536
10537 CASE_FLT_FN (BUILT_IN_LOG):
10538 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
10539
10540 CASE_FLT_FN (BUILT_IN_LOG2):
10541 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
10542
10543 CASE_FLT_FN (BUILT_IN_LOG10):
10544 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
10545
10546 CASE_FLT_FN (BUILT_IN_LOG1P):
10547 if (validate_arg (arg0, REAL_TYPE))
10548 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10549 &dconstm1, NULL, false);
10550 break;
10551
10552 CASE_FLT_FN (BUILT_IN_J0):
10553 if (validate_arg (arg0, REAL_TYPE))
10554 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10555 NULL, NULL, 0);
10556 break;
10557
10558 CASE_FLT_FN (BUILT_IN_J1):
10559 if (validate_arg (arg0, REAL_TYPE))
10560 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10561 NULL, NULL, 0);
10562 break;
10563
10564 CASE_FLT_FN (BUILT_IN_Y0):
10565 if (validate_arg (arg0, REAL_TYPE))
10566 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10567 &dconst0, NULL, false);
10568 break;
10569
10570 CASE_FLT_FN (BUILT_IN_Y1):
10571 if (validate_arg (arg0, REAL_TYPE))
10572 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10573 &dconst0, NULL, false);
10574 break;
10575
10576 CASE_FLT_FN (BUILT_IN_NAN):
10577 case BUILT_IN_NAND32:
10578 case BUILT_IN_NAND64:
10579 case BUILT_IN_NAND128:
10580 return fold_builtin_nan (arg0, type, true);
10581
10582 CASE_FLT_FN (BUILT_IN_NANS):
10583 return fold_builtin_nan (arg0, type, false);
10584
10585 CASE_FLT_FN (BUILT_IN_FLOOR):
10586 return fold_builtin_floor (loc, fndecl, arg0);
10587
10588 CASE_FLT_FN (BUILT_IN_CEIL):
10589 return fold_builtin_ceil (loc, fndecl, arg0);
10590
10591 CASE_FLT_FN (BUILT_IN_TRUNC):
10592 return fold_builtin_trunc (loc, fndecl, arg0);
10593
10594 CASE_FLT_FN (BUILT_IN_ROUND):
10595 return fold_builtin_round (loc, fndecl, arg0);
10596
10597 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10598 CASE_FLT_FN (BUILT_IN_RINT):
10599 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
10600
10601 CASE_FLT_FN (BUILT_IN_ICEIL):
10602 CASE_FLT_FN (BUILT_IN_LCEIL):
10603 CASE_FLT_FN (BUILT_IN_LLCEIL):
10604 CASE_FLT_FN (BUILT_IN_LFLOOR):
10605 CASE_FLT_FN (BUILT_IN_IFLOOR):
10606 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10607 CASE_FLT_FN (BUILT_IN_IROUND):
10608 CASE_FLT_FN (BUILT_IN_LROUND):
10609 CASE_FLT_FN (BUILT_IN_LLROUND):
10610 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
10611
10612 CASE_FLT_FN (BUILT_IN_IRINT):
10613 CASE_FLT_FN (BUILT_IN_LRINT):
10614 CASE_FLT_FN (BUILT_IN_LLRINT):
10615 return fold_fixed_mathfn (loc, fndecl, arg0);
10616
10617 case BUILT_IN_BSWAP16:
10618 case BUILT_IN_BSWAP32:
10619 case BUILT_IN_BSWAP64:
10620 return fold_builtin_bswap (fndecl, arg0);
10621
10622 CASE_INT_FN (BUILT_IN_FFS):
10623 CASE_INT_FN (BUILT_IN_CLZ):
10624 CASE_INT_FN (BUILT_IN_CTZ):
10625 CASE_INT_FN (BUILT_IN_CLRSB):
10626 CASE_INT_FN (BUILT_IN_POPCOUNT):
10627 CASE_INT_FN (BUILT_IN_PARITY):
10628 return fold_builtin_bitop (fndecl, arg0);
10629
10630 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10631 return fold_builtin_signbit (loc, arg0, type);
10632
10633 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10634 return fold_builtin_significand (loc, arg0, type);
10635
10636 CASE_FLT_FN (BUILT_IN_ILOGB):
10637 CASE_FLT_FN (BUILT_IN_LOGB):
10638 return fold_builtin_logb (loc, arg0, type);
10639
10640 case BUILT_IN_ISASCII:
10641 return fold_builtin_isascii (loc, arg0);
10642
10643 case BUILT_IN_TOASCII:
10644 return fold_builtin_toascii (loc, arg0);
10645
10646 case BUILT_IN_ISDIGIT:
10647 return fold_builtin_isdigit (loc, arg0);
10648
10649 CASE_FLT_FN (BUILT_IN_FINITE):
10650 case BUILT_IN_FINITED32:
10651 case BUILT_IN_FINITED64:
10652 case BUILT_IN_FINITED128:
10653 case BUILT_IN_ISFINITE:
10654 {
10655 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10656 if (ret)
10657 return ret;
10658 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10659 }
10660
10661 CASE_FLT_FN (BUILT_IN_ISINF):
10662 case BUILT_IN_ISINFD32:
10663 case BUILT_IN_ISINFD64:
10664 case BUILT_IN_ISINFD128:
10665 {
10666 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10667 if (ret)
10668 return ret;
10669 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10670 }
10671
10672 case BUILT_IN_ISNORMAL:
10673 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10674
10675 case BUILT_IN_ISINF_SIGN:
10676 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10677
10678 CASE_FLT_FN (BUILT_IN_ISNAN):
10679 case BUILT_IN_ISNAND32:
10680 case BUILT_IN_ISNAND64:
10681 case BUILT_IN_ISNAND128:
10682 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10683
10684 case BUILT_IN_PRINTF:
10685 case BUILT_IN_PRINTF_UNLOCKED:
10686 case BUILT_IN_VPRINTF:
10687 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
10688
10689 case BUILT_IN_FREE:
10690 if (integer_zerop (arg0))
10691 return build_empty_stmt (loc);
10692 break;
10693
10694 default:
10695 break;
10696 }
10697
10698 return NULL_TREE;
10700 }
10701
10702 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10703 IGNORE is true if the result of the function call is ignored. This
10704 function returns NULL_TREE if no simplification was possible. */
10705
10706 static tree
10707 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
10708 {
10709 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10710 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10711
10712 switch (fcode)
10713 {
10714 CASE_FLT_FN (BUILT_IN_JN):
10715 if (validate_arg (arg0, INTEGER_TYPE)
10716 && validate_arg (arg1, REAL_TYPE))
10717 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10718 break;
10719
10720 CASE_FLT_FN (BUILT_IN_YN):
10721 if (validate_arg (arg0, INTEGER_TYPE)
10722 && validate_arg (arg1, REAL_TYPE))
10723 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10724 &dconst0, false);
10725 break;
10726
10727 CASE_FLT_FN (BUILT_IN_DREM):
10728 CASE_FLT_FN (BUILT_IN_REMAINDER):
10729 if (validate_arg (arg0, REAL_TYPE)
10730 && validate_arg (arg1, REAL_TYPE))
10731 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10732 break;
10733
10734 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10735 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10736 if (validate_arg (arg0, REAL_TYPE)
10737 && validate_arg (arg1, POINTER_TYPE))
10738 return do_mpfr_lgamma_r (arg0, arg1, type);
10739 break;
10740
10741 CASE_FLT_FN (BUILT_IN_ATAN2):
10742 if (validate_arg (arg0, REAL_TYPE)
10743 && validate_arg (arg1, REAL_TYPE))
10744 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10745 break;
10746
10747 CASE_FLT_FN (BUILT_IN_FDIM):
10748 if (validate_arg (arg0, REAL_TYPE)
10749 && validate_arg (arg1, REAL_TYPE))
10750 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10751 break;
10752
10753 CASE_FLT_FN (BUILT_IN_HYPOT):
10754 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10755
10756 CASE_FLT_FN (BUILT_IN_CPOW):
10757 if (validate_arg (arg0, COMPLEX_TYPE)
10758 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10759 && validate_arg (arg1, COMPLEX_TYPE)
10760 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10761 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10762 break;
10763
10764 CASE_FLT_FN (BUILT_IN_LDEXP):
10765 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);

10766 CASE_FLT_FN (BUILT_IN_SCALBN):
10767 CASE_FLT_FN (BUILT_IN_SCALBLN):
10768 return fold_builtin_load_exponent (loc, arg0, arg1,
10769 type, /*ldexp=*/false);
10770
10771 CASE_FLT_FN (BUILT_IN_FREXP):
10772 return fold_builtin_frexp (loc, arg0, arg1, type);
10773
10774 CASE_FLT_FN (BUILT_IN_MODF):
10775 return fold_builtin_modf (loc, arg0, arg1, type);
10776
10777 case BUILT_IN_BZERO:
10778 return fold_builtin_bzero (loc, arg0, arg1, ignore);
10779
10780 case BUILT_IN_FPUTS:
10781 return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);
10782
10783 case BUILT_IN_FPUTS_UNLOCKED:
10784 return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);
10785
10786 case BUILT_IN_STRSTR:
10787 return fold_builtin_strstr (loc, arg0, arg1, type);
10788
10789 case BUILT_IN_STRCAT:
10790 return fold_builtin_strcat (loc, arg0, arg1);
10791
10792 case BUILT_IN_STRSPN:
10793 return fold_builtin_strspn (loc, arg0, arg1);
10794
10795 case BUILT_IN_STRCSPN:
10796 return fold_builtin_strcspn (loc, arg0, arg1);
10797
10798 case BUILT_IN_STRCHR:
10799 case BUILT_IN_INDEX:
10800 return fold_builtin_strchr (loc, arg0, arg1, type);
10801
10802 case BUILT_IN_STRRCHR:
10803 case BUILT_IN_RINDEX:
10804 return fold_builtin_strrchr (loc, arg0, arg1, type);
10805
10806 case BUILT_IN_STRCPY:
10807 return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);
10808
10809 case BUILT_IN_STPCPY:
10810 if (ignore)
10811 {
10812 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
10813 if (!fn)
10814 break;
10815
10816 return build_call_expr_loc (loc, fn, 2, arg0, arg1);
10817 }
10818 else
10819 return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
10820 break;
10821
10822 case BUILT_IN_STRCMP:
10823 return fold_builtin_strcmp (loc, arg0, arg1);
10824
10825 case BUILT_IN_STRPBRK:
10826 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10827
10828 case BUILT_IN_EXPECT:
10829 return fold_builtin_expect (loc, arg0, arg1);
10830
10831 CASE_FLT_FN (BUILT_IN_POW):
10832 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10833
10834 CASE_FLT_FN (BUILT_IN_POWI):
10835 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10836
10837 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10838 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10839
10840 CASE_FLT_FN (BUILT_IN_FMIN):
10841 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10842
10843 CASE_FLT_FN (BUILT_IN_FMAX):
10844 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10845
10846 case BUILT_IN_ISGREATER:
10847 return fold_builtin_unordered_cmp (loc, fndecl,
10848 arg0, arg1, UNLE_EXPR, LE_EXPR);
10849 case BUILT_IN_ISGREATEREQUAL:
10850 return fold_builtin_unordered_cmp (loc, fndecl,
10851 arg0, arg1, UNLT_EXPR, LT_EXPR);
10852 case BUILT_IN_ISLESS:
10853 return fold_builtin_unordered_cmp (loc, fndecl,
10854 arg0, arg1, UNGE_EXPR, GE_EXPR);
10855 case BUILT_IN_ISLESSEQUAL:
10856 return fold_builtin_unordered_cmp (loc, fndecl,
10857 arg0, arg1, UNGT_EXPR, GT_EXPR);
10858 case BUILT_IN_ISLESSGREATER:
10859 return fold_builtin_unordered_cmp (loc, fndecl,
10860 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10861 case BUILT_IN_ISUNORDERED:
10862 return fold_builtin_unordered_cmp (loc, fndecl,
10863 arg0, arg1, UNORDERED_EXPR,
10864 NOP_EXPR);
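
      /* An illustrative sketch of the mapping above: isgreater (x, y)
	 is folded via the unordered code to !(x unle y), which has the
	 value of x > y but, unlike a plain ordered comparison, raises
	 no invalid-operand exception when either operand is a NaN.  */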
10865
10866 /* We do the folding for va_start in the expander. */
10867 case BUILT_IN_VA_START:
10868 break;
10869
10870 case BUILT_IN_SPRINTF:
10871 return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);
10872
10873 case BUILT_IN_OBJECT_SIZE:
10874 return fold_builtin_object_size (arg0, arg1);
10875
10876 case BUILT_IN_PRINTF:
10877 case BUILT_IN_PRINTF_UNLOCKED:
10878 case BUILT_IN_VPRINTF:
10879 return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);
10880
10881 case BUILT_IN_PRINTF_CHK:
10882 case BUILT_IN_VPRINTF_CHK:
10883 if (!validate_arg (arg0, INTEGER_TYPE)
10884 || TREE_SIDE_EFFECTS (arg0))
10885 return NULL_TREE;
10886 else
10887 return fold_builtin_printf (loc, fndecl,
10888 arg1, NULL_TREE, ignore, fcode);
10889 break;
10890
10891 case BUILT_IN_FPRINTF:
10892 case BUILT_IN_FPRINTF_UNLOCKED:
10893 case BUILT_IN_VFPRINTF:
10894 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
10895 ignore, fcode);
10896
10897 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10898 return fold_builtin_atomic_always_lock_free (arg0, arg1);
10899
10900 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10901 return fold_builtin_atomic_is_lock_free (arg0, arg1);
10902
10903 default:
10904 break;
10905 }
10906 return NULL_TREE;
10907 }
10908
10909 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10910 and ARG2. IGNORE is true if the result of the function call is ignored.
10911 This function returns NULL_TREE if no simplification was possible. */
10912
10913 static tree
10914 fold_builtin_3 (location_t loc, tree fndecl,
10915 tree arg0, tree arg1, tree arg2, bool ignore)
10916 {
10917 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10918 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10919 switch (fcode)
10920 {
10921
10922 CASE_FLT_FN (BUILT_IN_SINCOS):
10923 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10924
10925 CASE_FLT_FN (BUILT_IN_FMA):
10926 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
10928
10929 CASE_FLT_FN (BUILT_IN_REMQUO):
10930 if (validate_arg (arg0, REAL_TYPE)
10931 && validate_arg (arg1, REAL_TYPE)
10932 && validate_arg (arg2, POINTER_TYPE))
10933 return do_mpfr_remquo (arg0, arg1, arg2);
10934 break;
10935
10936 case BUILT_IN_MEMSET:
10937 return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);
10938
10939 case BUILT_IN_BCOPY:
10940 return fold_builtin_memory_op (loc, arg1, arg0, arg2,
10941 void_type_node, true, /*endp=*/3);
10942
10943 case BUILT_IN_MEMCPY:
10944 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10945 type, ignore, /*endp=*/0);
10946
10947 case BUILT_IN_MEMPCPY:
10948 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10949 type, ignore, /*endp=*/1);
10950
10951 case BUILT_IN_MEMMOVE:
10952 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10953 type, ignore, /*endp=*/3);
10954
10955 case BUILT_IN_STRNCAT:
10956 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10957
10958 case BUILT_IN_STRNCPY:
10959 return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);
10960
10961 case BUILT_IN_STRNCMP:
10962 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10963
10964 case BUILT_IN_MEMCHR:
10965 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10966
10967 case BUILT_IN_BCMP:
10968 case BUILT_IN_MEMCMP:
10969 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
10970
10971 case BUILT_IN_SPRINTF:
10972 return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);
10973
10974 case BUILT_IN_SNPRINTF:
10975 return fold_builtin_snprintf (loc, arg0, arg1, arg2, NULL_TREE, ignore);
10976
10977 case BUILT_IN_STRCPY_CHK:
10978 case BUILT_IN_STPCPY_CHK:
10979 return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
10980 ignore, fcode);
10981
10982 case BUILT_IN_STRCAT_CHK:
10983 return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
10984
10985 case BUILT_IN_PRINTF_CHK:
10986 case BUILT_IN_VPRINTF_CHK:
10987 if (!validate_arg (arg0, INTEGER_TYPE)
10988 || TREE_SIDE_EFFECTS (arg0))
10989 return NULL_TREE;
10990 else
10991 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
10992 break;
10993
10994 case BUILT_IN_FPRINTF:
10995 case BUILT_IN_FPRINTF_UNLOCKED:
10996 case BUILT_IN_VFPRINTF:
10997 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
10998 ignore, fcode);
10999
11000 case BUILT_IN_FPRINTF_CHK:
11001 case BUILT_IN_VFPRINTF_CHK:
11002 if (!validate_arg (arg1, INTEGER_TYPE)
11003 || TREE_SIDE_EFFECTS (arg1))
11004 return NULL_TREE;
11005 else
11006 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
11007 ignore, fcode);
11008
11009 default:
11010 break;
11011 }
11012 return NULL_TREE;
11013 }
11014
11015 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
11016 ARG2, and ARG3. IGNORE is true if the result of the function call is
11017 ignored. This function returns NULL_TREE if no simplification was
11018 possible. */
11019
11020 static tree
11021 fold_builtin_4 (location_t loc, tree fndecl,
11022 tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
11023 {
11024 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11025
11026 switch (fcode)
11027 {
11028 case BUILT_IN_MEMCPY_CHK:
11029 case BUILT_IN_MEMPCPY_CHK:
11030 case BUILT_IN_MEMMOVE_CHK:
11031 case BUILT_IN_MEMSET_CHK:
11032 return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
11033 NULL_TREE, ignore,
11034 DECL_FUNCTION_CODE (fndecl));
11035
11036 case BUILT_IN_STRNCPY_CHK:
11037 case BUILT_IN_STPNCPY_CHK:
11038 return fold_builtin_stxncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE,
11039 ignore, fcode);
11040
11041 case BUILT_IN_STRNCAT_CHK:
11042 return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);
11043
11044 case BUILT_IN_SNPRINTF:
11045 return fold_builtin_snprintf (loc, arg0, arg1, arg2, arg3, ignore);
11046
11047 case BUILT_IN_FPRINTF_CHK:
11048 case BUILT_IN_VFPRINTF_CHK:
11049 if (!validate_arg (arg1, INTEGER_TYPE)
11050 || TREE_SIDE_EFFECTS (arg1))
11051 return NULL_TREE;
11052 else
11053 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
11054 ignore, fcode);
11055 break;
11056
11057 default:
11058 break;
11059 }
11060 return NULL_TREE;
11061 }
11062
11063 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
11064 arguments, where NARGS <= 4. IGNORE is true if the result of the
11065 function call is ignored. This function returns NULL_TREE if no
11066 simplification was possible. Note that this only folds builtins with
11067 fixed argument patterns. Foldings that do varargs-to-varargs
11068 transformations, or that match calls with more than 4 arguments,
11069 need to be handled with fold_builtin_varargs instead. */
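
/* For instance (an illustrative sketch of the dispatch, not an exhaustive
   description): a three-argument call such as

     __builtin_memcpy (dst, src, 16)

   arrives here with NARGS == 3, is dispatched to fold_builtin_3, and
   ends up in fold_builtin_memory_op above.  */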
11070
11071 #define MAX_ARGS_TO_FOLD_BUILTIN 4
11072
11073 static tree
11074 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
11075 {
11076 tree ret = NULL_TREE;
11077
11078 switch (nargs)
11079 {
11080 case 0:
11081 ret = fold_builtin_0 (loc, fndecl, ignore);
11082 break;
11083 case 1:
11084 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
11085 break;
11086 case 2:
11087 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
11088 break;
11089 case 3:
11090 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
11091 break;
11092 case 4:
11093 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
11094 ignore);
11095 break;
11096 default:
11097 break;
11098 }
11099 if (ret)
11100 {
11101 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11102 SET_EXPR_LOCATION (ret, loc);
11103 TREE_NO_WARNING (ret) = 1;
11104 return ret;
11105 }
11106 return NULL_TREE;
11107 }
11108
11109 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
11110 list ARGS along with N new arguments in NEWARGS. SKIP is the number
11111 of arguments in ARGS to be omitted. OLDNARGS is the number of
11112 elements in ARGS. */
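
/* Sketch of the buffer layout built below: with OLDNARGS == 3, SKIP == 1
   and N == 1 new argument X, the rewritten call receives the argument
   vector { X, args[1], args[2] }.  */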
11113
11114 static tree
11115 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
11116 int skip, tree fndecl, int n, va_list newargs)
11117 {
11118 int nargs = oldnargs - skip + n;
11119 tree *buffer;
11120
11121 if (n > 0)
11122 {
11123 int i, j;
11124
11125 buffer = XALLOCAVEC (tree, nargs);
11126 for (i = 0; i < n; i++)
11127 buffer[i] = va_arg (newargs, tree);
11128 for (j = skip; j < oldnargs; j++, i++)
11129 buffer[i] = args[j];
11130 }
11131 else
11132 buffer = args + skip;
11133
11134 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
11135 }
11136
11137 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
11138 list ARGS along with N new arguments specified as the "..."
11139 parameters. SKIP is the number of arguments in ARGS to be omitted.
11140 OLDNARGS is the number of elements in ARGS. */
11141
11142 static tree
11143 rewrite_call_expr_array (location_t loc, int oldnargs, tree *args,
11144 int skip, tree fndecl, int n, ...)
11145 {
11146 va_list ap;
11147 tree t;
11148
11149 va_start (ap, n);
11150 t = rewrite_call_expr_valist (loc, oldnargs, args, skip, fndecl, n, ap);
11151 va_end (ap);
11152
11153 return t;
11154 }
11155
11156 /* Return true if FNDECL shouldn't be folded right now.
11157 If a built-in function has an inline attribute always_inline
11158 wrapper, defer folding it after always_inline functions have
11159 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
11160 might not be performed. */
11161
11162 bool
11163 avoid_folding_inline_builtin (tree fndecl)
11164 {
11165 return (DECL_DECLARED_INLINE_P (fndecl)
11166 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
11167 && cfun
11168 && !cfun->always_inline_functions_inlined
11169 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
11170 }
11171
11172 /* A wrapper function for builtin folding that prevents warnings for
11173 "statement without effect" and the like, caused by removing the
11174 call node earlier than the warning is generated. */
11175
11176 tree
11177 fold_call_expr (location_t loc, tree exp, bool ignore)
11178 {
11179 tree ret = NULL_TREE;
11180 tree fndecl = get_callee_fndecl (exp);
11181 if (fndecl
11182 && TREE_CODE (fndecl) == FUNCTION_DECL
11183 && DECL_BUILT_IN (fndecl)
11184 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
11185 yet. Defer folding until we see all the arguments
11186 (after inlining). */
11187 && !CALL_EXPR_VA_ARG_PACK (exp))
11188 {
11189 int nargs = call_expr_nargs (exp);
11190
11191 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
11192 instead last argument is __builtin_va_arg_pack (). Defer folding
11193 even in that case, until arguments are finalized. */
11194 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
11195 {
11196 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
11197 if (fndecl2
11198 && TREE_CODE (fndecl2) == FUNCTION_DECL
11199 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
11200 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
11201 return NULL_TREE;
11202 }
11203
11204 if (avoid_folding_inline_builtin (fndecl))
11205 return NULL_TREE;
11206
11207 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11208 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
11209 CALL_EXPR_ARGP (exp), ignore);
11210 else
11211 {
11212 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
11213 {
11214 tree *args = CALL_EXPR_ARGP (exp);
11215 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
11216 }
11217 if (!ret)
11218 ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
11219 if (ret)
11220 return ret;
11221 }
11222 }
11223 return NULL_TREE;
11224 }
11225
11226 /* Conveniently construct a function call expression. FNDECL names the
11227 function to be called and N arguments are passed in the array
11228 ARGARRAY. */
11229
11230 tree
11231 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
11232 {
11233 tree fntype = TREE_TYPE (fndecl);
11234 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11235
11236 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
11237 }
11238
11239 /* Conveniently construct a function call expression. FNDECL names the
11240 function to be called and the arguments are passed in the vector
11241 VEC. */
11242
11243 tree
11244 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
11245 {
11246 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
11247 vec_safe_address (vec));
11248 }
11249
11250
11251 /* Conveniently construct a function call expression. FNDECL names the
11252 function to be called, N is the number of arguments, and the "..."
11253 parameters are the argument expressions. */
11254
11255 tree
11256 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
11257 {
11258 va_list ap;
11259 tree *argarray = XALLOCAVEC (tree, n);
11260 int i;
11261
11262 va_start (ap, n);
11263 for (i = 0; i < n; i++)
11264 argarray[i] = va_arg (ap, tree);
11265 va_end (ap);
11266 return build_call_expr_loc_array (loc, fndecl, n, argarray);
11267 }
11268
11269 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
11270 varargs macros aren't supported by all bootstrap compilers. */
11271
11272 tree
11273 build_call_expr (tree fndecl, int n, ...)
11274 {
11275 va_list ap;
11276 tree *argarray = XALLOCAVEC (tree, n);
11277 int i;
11278
11279 va_start (ap, n);
11280 for (i = 0; i < n; i++)
11281 argarray[i] = va_arg (ap, tree);
11282 va_end (ap);
11283 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
11284 }
11285
11286 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
11287 N arguments are passed in the array ARGARRAY. */
11288
11289 tree
11290 fold_builtin_call_array (location_t loc, tree type,
11291 tree fn,
11292 int n,
11293 tree *argarray)
11294 {
11295 tree ret = NULL_TREE;
11296 tree exp;
11297
11298 if (TREE_CODE (fn) == ADDR_EXPR)
11299 {
11300 tree fndecl = TREE_OPERAND (fn, 0);
11301 if (TREE_CODE (fndecl) == FUNCTION_DECL
11302 && DECL_BUILT_IN (fndecl))
11303 {
11304 /* If last argument is __builtin_va_arg_pack (), arguments to this
11305 function are not finalized yet. Defer folding until they are. */
11306 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
11307 {
11308 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
11309 if (fndecl2
11310 && TREE_CODE (fndecl2) == FUNCTION_DECL
11311 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
11312 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
11313 return build_call_array_loc (loc, type, fn, n, argarray);
11314 }
11315 if (avoid_folding_inline_builtin (fndecl))
11316 return build_call_array_loc (loc, type, fn, n, argarray);
11317 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11318 {
11319 ret = targetm.fold_builtin (fndecl, n, argarray, false);
11320 if (ret)
11321 return ret;
11322
11323 return build_call_array_loc (loc, type, fn, n, argarray);
11324 }
11325 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
11326 {
11327 /* First try the transformations that don't require consing up
11328 an exp. */
11329 ret = fold_builtin_n (loc, fndecl, argarray, n, false);
11330 if (ret)
11331 return ret;
11332 }
11333
11334 /* If we got this far, we need to build an exp. */
11335 exp = build_call_array_loc (loc, type, fn, n, argarray);
11336 ret = fold_builtin_varargs (loc, fndecl, exp, false);
11337 return ret ? ret : exp;
11338 }
11339 }
11340
11341 return build_call_array_loc (loc, type, fn, n, argarray);
11342 }
11343
11344 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
11345 along with N new arguments specified as the "..." parameters. SKIP
11346 is the number of arguments in EXP to be omitted. This function is used
11347 to do varargs-to-varargs transformations. */
11348
11349 static tree
11350 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
11351 {
11352 va_list ap;
11353 tree t;
11354
11355 va_start (ap, n);
11356 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
11357 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
11358 va_end (ap);
11359
11360 return t;
11361 }
11362
11363 /* Validate a single argument ARG against a tree code CODE representing
11364 a type. */
11365
11366 static bool
11367 validate_arg (const_tree arg, enum tree_code code)
11368 {
11369 if (!arg)
11370 return false;
11371 else if (code == POINTER_TYPE)
11372 return POINTER_TYPE_P (TREE_TYPE (arg));
11373 else if (code == INTEGER_TYPE)
11374 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
11375 return code == TREE_CODE (TREE_TYPE (arg));
11376 }
11377
11378 /* This function validates the types of a function call argument list
11379 against a specified list of tree_codes. If the last specifier is a 0,
11380 that represents an ellipses, otherwise the last specifier must be a
11381 VOID_TYPE.
11382
11383 This is the GIMPLE version of validate_arglist. Eventually we want to
11384 completely convert builtins.c to work from GIMPLEs and the tree based
11385 validate_arglist will then be removed. */
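
/* A typical use (illustrative): checking a strchr-like call for exactly
   one pointer argument followed by one integer argument:

     validate_gimple_arglist (call, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)  */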
11386
11387 bool
11388 validate_gimple_arglist (const_gimple call, ...)
11389 {
11390 enum tree_code code;
11391 bool res = false;
11392 va_list ap;
11393 const_tree arg;
11394 size_t i;
11395
11396 va_start (ap, call);
11397 i = 0;
11398
11399 do
11400 {
11401 code = (enum tree_code) va_arg (ap, int);
11402 switch (code)
11403 {
11404 case 0:
11405 /* This signifies an ellipsis; any further arguments are all ok. */
11406 res = true;
11407 goto end;
11408 case VOID_TYPE:
11409 /* This signifies an endlink, if no arguments remain, return
11410 true, otherwise return false. */
11411 res = (i == gimple_call_num_args (call));
11412 goto end;
11413 default:
11414 /* If no parameters remain or the parameter's code does not
11415 match the specified code, return false. Otherwise continue
11416 checking any remaining arguments. */
11417 arg = gimple_call_arg (call, i++);
11418 if (!validate_arg (arg, code))
11419 goto end;
11420 break;
11421 }
11422 }
11423 while (1);
11424
11425 /* We need the gotos here so that every path out of the loop above
11426 reaches the single va_end call below. */
11427 end: ;
11428 va_end (ap);
11429
11430 return res;
11431 }
11432
11433 /* Default target-specific builtin expander that does nothing. */
11434
11435 rtx
11436 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
11437 rtx target ATTRIBUTE_UNUSED,
11438 rtx subtarget ATTRIBUTE_UNUSED,
11439 enum machine_mode mode ATTRIBUTE_UNUSED,
11440 int ignore ATTRIBUTE_UNUSED)
11441 {
11442 return NULL_RTX;
11443 }
11444
11445 /* Returns true if EXP represents data that would potentially reside
11446 in a readonly section. */
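
/* E.g. the address of a string literal, of a CONSTRUCTOR, or of a
   TREE_STATIC VAR_DECL placed in a read-only section qualifies.  One
   use of this predicate (a sketch) is proving that the source of a
   memmove cannot overlap a writable destination, so the call can be
   rewritten as memcpy.  */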
11447
11448 static bool
11449 readonly_data_expr (tree exp)
11450 {
11451 STRIP_NOPS (exp);
11452
11453 if (TREE_CODE (exp) != ADDR_EXPR)
11454 return false;
11455
11456 exp = get_base_address (TREE_OPERAND (exp, 0));
11457 if (!exp)
11458 return false;
11459
11460 /* Make sure we call decl_readonly_section only for trees it
11461 can handle (since it returns true for everything it doesn't
11462 understand). */
11463 if (TREE_CODE (exp) == STRING_CST
11464 || TREE_CODE (exp) == CONSTRUCTOR
11465 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
11466 return decl_readonly_section (exp, 0);
11467 else
11468 return false;
11469 }
11470
11471 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
11472 to the call, and TYPE is its return type.
11473
11474 Return NULL_TREE if no simplification was possible, otherwise return the
11475 simplified form of the call as a tree.
11476
11477 The simplified form may be a constant or other expression which
11478 computes the same value, but in a more efficient manner (including
11479 calls to other builtin functions).
11480
11481 The call may contain arguments which need to be evaluated, but
11482 which are not useful to determine the result of the call. In
11483 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11484 COMPOUND_EXPR will be an argument which must be evaluated.
11485 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11486 COMPOUND_EXPR in the chain will contain the tree for the simplified
11487 form of the builtin function call. */
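
/* Examples of the folds performed below (illustrative):

     strstr (s, "")        ->  (char *) s
     strstr (s, "c")       ->  strchr (s, 'c')
     strstr ("abc", "bc")  ->  "abc" + 1    (both arguments constant)  */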
11488
11489 static tree
11490 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
11491 {
11492 if (!validate_arg (s1, POINTER_TYPE)
11493 || !validate_arg (s2, POINTER_TYPE))
11494 return NULL_TREE;
11495 else
11496 {
11497 tree fn;
11498 const char *p1, *p2;
11499
11500 p2 = c_getstr (s2);
11501 if (p2 == NULL)
11502 return NULL_TREE;
11503
11504 p1 = c_getstr (s1);
11505 if (p1 != NULL)
11506 {
11507 const char *r = strstr (p1, p2);
11508 tree tem;
11509
11510 if (r == NULL)
11511 return build_int_cst (TREE_TYPE (s1), 0);
11512
11513 /* Return an offset into the constant string argument. */
11514 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11515 return fold_convert_loc (loc, type, tem);
11516 }
11517
11518 /* The argument is const char *, and the result is char *, so we need
11519 a type conversion here to avoid a warning. */
11520 if (p2[0] == '\0')
11521 return fold_convert_loc (loc, type, s1);
11522
11523 if (p2[1] != '\0')
11524 return NULL_TREE;
11525
11526 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11527 if (!fn)
11528 return NULL_TREE;
11529
11530 /* New argument list transforming strstr(s1, s2) to
11531 strchr(s1, s2[0]). */
11532 return build_call_expr_loc (loc, fn, 2, s1,
11533 build_int_cst (integer_type_node, p2[0]));
11534 }
11535 }
11536
11537 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11538 the call, and TYPE is its return type.
11539
11540 Return NULL_TREE if no simplification was possible, otherwise return the
11541 simplified form of the call as a tree.
11542
11543 The simplified form may be a constant or other expression which
11544 computes the same value, but in a more efficient manner (including
11545 calls to other builtin functions).
11546
11547 The call may contain arguments which need to be evaluated, but
11548 which are not useful to determine the result of the call. In
11549 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11550 COMPOUND_EXPR will be an argument which must be evaluated.
11551 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11552 COMPOUND_EXPR in the chain will contain the tree for the simplified
11553 form of the builtin function call. */
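
/* E.g. (illustrative) strchr ("abc", 'b') folds to the constant
   "abc" + 1, and strchr ("abc", 'd') folds to a null pointer.  */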
11554
11555 static tree
11556 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
11557 {
11558 if (!validate_arg (s1, POINTER_TYPE)
11559 || !validate_arg (s2, INTEGER_TYPE))
11560 return NULL_TREE;
11561 else
11562 {
11563 const char *p1;
11564
11565 if (TREE_CODE (s2) != INTEGER_CST)
11566 return NULL_TREE;
11567
11568 p1 = c_getstr (s1);
11569 if (p1 != NULL)
11570 {
11571 char c;
11572 const char *r;
11573 tree tem;
11574
11575 if (target_char_cast (s2, &c))
11576 return NULL_TREE;
11577
11578 r = strchr (p1, c);
11579
11580 if (r == NULL)
11581 return build_int_cst (TREE_TYPE (s1), 0);
11582
11583 /* Return an offset into the constant string argument. */
11584 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11585 return fold_convert_loc (loc, type, tem);
11586 }
11587 return NULL_TREE;
11588 }
11589 }
11590
11591 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11592 the call, and TYPE is its return type.
11593
11594 Return NULL_TREE if no simplification was possible, otherwise return the
11595 simplified form of the call as a tree.
11596
11597 The simplified form may be a constant or other expression which
11598 computes the same value, but in a more efficient manner (including
11599 calls to other builtin functions).
11600
11601 The call may contain arguments which need to be evaluated, but
11602 which are not useful to determine the result of the call. In
11603 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11604 COMPOUND_EXPR will be an argument which must be evaluated.
11605 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11606 COMPOUND_EXPR in the chain will contain the tree for the simplified
11607 form of the builtin function call. */
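
/* E.g. (illustrative) strrchr ("abcb", 'b') folds to "abcb" + 3, and
   strrchr (s, '\0') becomes strchr (s, '\0'), which can be expanded
   more cheaply.  */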
11608
11609 static tree
11610 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
11611 {
11612 if (!validate_arg (s1, POINTER_TYPE)
11613 || !validate_arg (s2, INTEGER_TYPE))
11614 return NULL_TREE;
11615 else
11616 {
11617 tree fn;
11618 const char *p1;
11619
11620 if (TREE_CODE (s2) != INTEGER_CST)
11621 return NULL_TREE;
11622
11623 p1 = c_getstr (s1);
11624 if (p1 != NULL)
11625 {
11626 char c;
11627 const char *r;
11628 tree tem;
11629
11630 if (target_char_cast (s2, &c))
11631 return NULL_TREE;
11632
11633 r = strrchr (p1, c);
11634
11635 if (r == NULL)
11636 return build_int_cst (TREE_TYPE (s1), 0);
11637
11638 /* Return an offset into the constant string argument. */
11639 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11640 return fold_convert_loc (loc, type, tem);
11641 }
11642
11643 if (! integer_zerop (s2))
11644 return NULL_TREE;
11645
11646 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11647 if (!fn)
11648 return NULL_TREE;
11649
11650 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11651 return build_call_expr_loc (loc, fn, 2, s1, s2);
11652 }
11653 }
11654
11655 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11656 to the call, and TYPE is its return type.
11657
11658 Return NULL_TREE if no simplification was possible, otherwise return the
11659 simplified form of the call as a tree.
11660
11661 The simplified form may be a constant or other expression which
11662 computes the same value, but in a more efficient manner (including
11663 calls to other builtin functions).
11664
11665 The call may contain arguments which need to be evaluated, but
11666 which are not useful to determine the result of the call. In
11667 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11668 COMPOUND_EXPR will be an argument which must be evaluated.
11669 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11670 COMPOUND_EXPR in the chain will contain the tree for the simplified
11671 form of the builtin function call. */
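
/* Examples of the folds performed below (illustrative):

     strpbrk (s, "")      ->  NULL    (s still evaluated for side-effects)
     strpbrk (s, "c")     ->  strchr (s, 'c')
     strpbrk ("ab", "b")  ->  "ab" + 1    (both arguments constant)  */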
11672
11673 static tree
11674 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
11675 {
11676 if (!validate_arg (s1, POINTER_TYPE)
11677 || !validate_arg (s2, POINTER_TYPE))
11678 return NULL_TREE;
11679 else
11680 {
11681 tree fn;
11682 const char *p1, *p2;
11683
11684 p2 = c_getstr (s2);
11685 if (p2 == NULL)
11686 return NULL_TREE;
11687
11688 p1 = c_getstr (s1);
11689 if (p1 != NULL)
11690 {
11691 const char *r = strpbrk (p1, p2);
11692 tree tem;
11693
11694 if (r == NULL)
11695 return build_int_cst (TREE_TYPE (s1), 0);
11696
11697 /* Return an offset into the constant string argument. */
11698 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11699 return fold_convert_loc (loc, type, tem);
11700 }
11701
11702 if (p2[0] == '\0')
11703 /* strpbrk(x, "") == NULL.
11704 Evaluate and ignore s1 in case it had side-effects. */
11705 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11706
11707 if (p2[1] != '\0')
11708 return NULL_TREE; /* Really call strpbrk. */
11709
11710 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11711 if (!fn)
11712 return NULL_TREE;
11713
11714 /* New argument list transforming strpbrk(s1, s2) to
11715 strchr(s1, s2[0]). */
11716 return build_call_expr_loc (loc, fn, 2, s1,
11717 build_int_cst (integer_type_node, p2[0]));
11718 }
11719 }
11720
11721 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11722 to the call.
11723
11724 Return NULL_TREE if no simplification was possible, otherwise return the
11725 simplified form of the call as a tree.
11726
11727 The simplified form may be a constant or other expression which
11728 computes the same value, but in a more efficient manner (including
11729 calls to other builtin functions).
11730
11731 The call may contain arguments which need to be evaluated, but
11732 which are not useful to determine the result of the call. In
11733 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11734 COMPOUND_EXPR will be an argument which must be evaluated.
11735 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11736 COMPOUND_EXPR in the chain will contain the tree for the simplified
11737 form of the builtin function call. */
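
/* A sketch of the speed transformation implemented below:

     strcat (dst, src)

   becomes, in effect,

     tmp = dst, strcpy (tmp + strlen (tmp), src), tmp  */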
11738
11739 static tree
11740 fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src)
11741 {
11742 if (!validate_arg (dst, POINTER_TYPE)
11743 || !validate_arg (src, POINTER_TYPE))
11744 return NULL_TREE;
11745 else
11746 {
11747 const char *p = c_getstr (src);
11748
11749 /* If the string length is zero, return the dst parameter. */
11750 if (p && *p == '\0')
11751 return dst;
11752
11753 if (optimize_insn_for_speed_p ())
11754 {
11755 /* See if we can store by pieces into (dst + strlen(dst)). */
11756 tree newdst, call;
11757 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11758 tree strcpy_fn = builtin_decl_implicit (BUILT_IN_STRCPY);
11759
11760 if (!strlen_fn || !strcpy_fn)
11761 return NULL_TREE;
11762
11763 /* If we don't have a movstr we don't want to emit an strcpy
11764 call. We have to do that if the length of the source string
11765 isn't computable (in that case we can use memcpy probably
11766 later expanding to a sequence of mov instructions). If we
11767 have movstr instructions we can emit strcpy calls. */
11768 if (!HAVE_movstr)
11769 {
11770 tree len = c_strlen (src, 1);
11771 if (! len || TREE_SIDE_EFFECTS (len))
11772 return NULL_TREE;
11773 }
11774
11775 /* Stabilize the argument list. */
11776 dst = builtin_save_expr (dst);
11777
11778 /* Create strlen (dst). */
11779 newdst = build_call_expr_loc (loc, strlen_fn, 1, dst);
11780 /* Create (dst p+ strlen (dst)). */
11781
11782 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
11783 newdst = builtin_save_expr (newdst);
11784
11785 call = build_call_expr_loc (loc, strcpy_fn, 2, newdst, src);
11786 return build2 (COMPOUND_EXPR, TREE_TYPE (dst), call, dst);
11787 }
11788 return NULL_TREE;
11789 }
11790 }
11791
11792 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11793 arguments to the call.
11794
11795 Return NULL_TREE if no simplification was possible, otherwise return the
11796 simplified form of the call as a tree.
11797
11798 The simplified form may be a constant or other expression which
11799 computes the same value, but in a more efficient manner (including
11800 calls to other builtin functions).
11801
11802 The call may contain arguments which need to be evaluated, but
11803 which are not useful to determine the result of the call. In
11804 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11805 COMPOUND_EXPR will be an argument which must be evaluated.
11806 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11807 COMPOUND_EXPR in the chain will contain the tree for the simplified
11808 form of the builtin function call. */
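
/* Examples of the folds performed below (illustrative):

     strncat (dst, src, 0)   ->  dst    (src and len still evaluated)
     strncat (dst, "ab", 5)  ->  strcat (dst, "ab")   (5 >= strlen ("ab"))  */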
11809
11810 static tree
11811 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
11812 {
11813 if (!validate_arg (dst, POINTER_TYPE)
11814 || !validate_arg (src, POINTER_TYPE)
11815 || !validate_arg (len, INTEGER_TYPE))
11816 return NULL_TREE;
11817 else
11818 {
11819 const char *p = c_getstr (src);
11820
11821 /* If the requested length is zero, or the src parameter string
11822 length is zero, return the dst parameter. */
11823 if (integer_zerop (len) || (p && *p == '\0'))
11824 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11825
11826 /* If the requested len is greater than or equal to the string
11827 length, call strcat. */
11828 if (TREE_CODE (len) == INTEGER_CST && p
11829 && compare_tree_int (len, strlen (p)) >= 0)
11830 {
11831 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
11832
11833 /* If the replacement _DECL isn't initialized, don't do the
11834 transformation. */
11835 if (!fn)
11836 return NULL_TREE;
11837
11838 return build_call_expr_loc (loc, fn, 2, dst, src);
11839 }
11840 return NULL_TREE;
11841 }
11842 }
11843
11844 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11845 to the call.
11846
11847 Return NULL_TREE if no simplification was possible, otherwise return the
11848 simplified form of the call as a tree.
11849
11850 The simplified form may be a constant or other expression which
11851 computes the same value, but in a more efficient manner (including
11852 calls to other builtin functions).
11853
11854 The call may contain arguments which need to be evaluated, but
11855 which are not useful to determine the result of the call. In
11856 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11857 COMPOUND_EXPR will be an argument which must be evaluated.
11858 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11859 COMPOUND_EXPR in the chain will contain the tree for the simplified
11860 form of the builtin function call. */
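
/* Examples of the folds performed below (illustrative):

     strspn ("aab", "a")  ->  2    (both arguments constant)
     strspn (s, "")       ->  0    (both arguments still evaluated)  */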
11861
11862 static tree
11863 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11864 {
11865 if (!validate_arg (s1, POINTER_TYPE)
11866 || !validate_arg (s2, POINTER_TYPE))
11867 return NULL_TREE;
11868 else
11869 {
11870 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11871
11872 /* If both arguments are constants, evaluate at compile-time. */
11873 if (p1 && p2)
11874 {
11875 const size_t r = strspn (p1, p2);
11876 return build_int_cst (size_type_node, r);
11877 }
11878
11879 /* If either argument is "", return NULL_TREE. */
11880 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11881 /* Evaluate and ignore both arguments in case either one has
11882 side-effects. */
11883 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11884 s1, s2);
11885 return NULL_TREE;
11886 }
11887 }
11888
11889 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11890 to the call.
11891
11892 Return NULL_TREE if no simplification was possible, otherwise return the
11893 simplified form of the call as a tree.
11894
11895 The simplified form may be a constant or other expression which
11896 computes the same value, but in a more efficient manner (including
11897 calls to other builtin functions).
11898
11899 The call may contain arguments which need to be evaluated, but
11900 which are not useful to determine the result of the call. In
11901 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11902 COMPOUND_EXPR will be an argument which must be evaluated.
11903 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11904 COMPOUND_EXPR in the chain will contain the tree for the simplified
11905 form of the builtin function call. */
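
/* Examples of the folds performed below (illustrative):

     strcspn ("ab", "b")  ->  1    (both arguments constant)
     strcspn ("", s)      ->  0    (s still evaluated)
     strcspn (s, "")      ->  strlen (s)  */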
11906
11907 static tree
11908 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11909 {
11910 if (!validate_arg (s1, POINTER_TYPE)
11911 || !validate_arg (s2, POINTER_TYPE))
11912 return NULL_TREE;
11913 else
11914 {
11915 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11916
11917 /* If both arguments are constants, evaluate at compile-time. */
11918 if (p1 && p2)
11919 {
11920 const size_t r = strcspn (p1, p2);
11921 return build_int_cst (size_type_node, r);
11922 }
11923
11924 /* If the first argument is "", return NULL_TREE. */
11925 if (p1 && *p1 == '\0')
11926 {
11927 /* Evaluate and ignore argument s2 in case it has
11928 side-effects. */
11929 return omit_one_operand_loc (loc, size_type_node,
11930 size_zero_node, s2);
11931 }
11932
11933 /* If the second argument is "", return __builtin_strlen(s1). */
11934 if (p2 && *p2 == '\0')
11935 {
11936 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11937
11938 /* If the replacement _DECL isn't initialized, don't do the
11939 transformation. */
11940 if (!fn)
11941 return NULL_TREE;
11942
11943 return build_call_expr_loc (loc, fn, 1, s1);
11944 }
11945 return NULL_TREE;
11946 }
11947 }
11948
11949 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11950 to the call. IGNORE is true if the value returned
11951 by the builtin will be ignored. UNLOCKED is true if this is
11952 actually a call to fputs_unlocked. If LEN is non-NULL, it represents
11953 the known length of the string. Return NULL_TREE if no simplification
11954 was possible. */
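
/* Examples of the folds performed below (illustrative):

     fputs ("", f)     ->  0    (f still evaluated for side-effects)
     fputs ("a", f)    ->  fputc ('a', f)
     fputs ("abc", f)  ->  fwrite ("abc", 1, 3, f)
				(only when not optimizing for size)  */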
11955
11956 tree
11957 fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
11958 bool ignore, bool unlocked, tree len)
11959 {
11960 /* If we're using an unlocked function, assume the other unlocked
11961 functions exist explicitly. */
11962 tree const fn_fputc = (unlocked
11963 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
11964 : builtin_decl_implicit (BUILT_IN_FPUTC));
11965 tree const fn_fwrite = (unlocked
11966 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
11967 : builtin_decl_implicit (BUILT_IN_FWRITE));
11968
11969 /* If the return value is used, don't do the transformation. */
11970 if (!ignore)
11971 return NULL_TREE;
11972
11973 /* Verify the arguments in the original call. */
11974 if (!validate_arg (arg0, POINTER_TYPE)
11975 || !validate_arg (arg1, POINTER_TYPE))
11976 return NULL_TREE;
11977
11978 if (! len)
11979 len = c_strlen (arg0, 0);
11980
11981 /* Get the length of the string passed to fputs. If the length
11982 can't be determined, punt. */
11983 if (!len
11984 || TREE_CODE (len) != INTEGER_CST)
11985 return NULL_TREE;
11986
11987 switch (compare_tree_int (len, 1))
11988 {
11989 case -1: /* length is 0, delete the call entirely. */
11990 return omit_one_operand_loc (loc, integer_type_node,
11991 integer_zero_node, arg1);
11992
11993 case 0: /* length is 1, call fputc. */
11994 {
11995 const char *p = c_getstr (arg0);
11996
11997 if (p != NULL)
11998 {
11999 if (fn_fputc)
12000 return build_call_expr_loc (loc, fn_fputc, 2,
12001 build_int_cst
12002 (integer_type_node, p[0]), arg1);
12003 else
12004 return NULL_TREE;
12005 }
12006 }
12007 /* FALLTHROUGH */
12008 case 1: /* length is greater than 1, call fwrite. */
12009 {
12010 /* If optimizing for size keep fputs. */
12011 if (optimize_function_for_size_p (cfun))
12012 return NULL_TREE;
12013 /* New argument list transforming fputs(string, stream) to
12014 fwrite(string, 1, len, stream). */
12015 if (fn_fwrite)
12016 return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
12017 size_one_node, len, arg1);
12018 else
12019 return NULL_TREE;
12020 }
12021 default:
12022 gcc_unreachable ();
12023 }
12024 return NULL_TREE;
12025 }
12026
12027 /* Fold the next_arg or va_start call EXP. Returns true if an error
12028 was produced, false otherwise. This is done so that we don't output
12029 the error or warning twice or three times. */
12030
12031 bool
12032 fold_builtin_next_arg (tree exp, bool va_start_p)
12033 {
12034 tree fntype = TREE_TYPE (current_function_decl);
12035 int nargs = call_expr_nargs (exp);
12036 tree arg;
12037 /* There is a good chance the current input_location points inside the
12038 definition of the va_start macro (perhaps on the token for
12039 builtin) in a system header, so warnings will not be emitted.
12040 Use the location in real source code. */
12041 source_location current_location =
12042 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
12043 NULL);
12044
12045 if (!stdarg_p (fntype))
12046 {
12047 error ("%<va_start%> used in function with fixed args");
12048 return true;
12049 }
12050
12051 if (va_start_p)
12052 {
12053 if (nargs != 2)
12054 {
12055 error ("wrong number of arguments to function %<va_start%>");
12056 return true;
12057 }
12058 arg = CALL_EXPR_ARG (exp, 1);
12059 }
12060 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
12061 when we checked the arguments and if needed issued a warning. */
12062 else
12063 {
12064 if (nargs == 0)
12065 {
12066 /* Evidently an out of date version of <stdarg.h>; can't validate
12067 va_start's second argument, but can still work as intended. */
12068 warning_at (current_location,
12069 OPT_Wvarargs,
12070 "%<__builtin_next_arg%> called without an argument");
12071 return true;
12072 }
12073 else if (nargs > 1)
12074 {
12075 error ("wrong number of arguments to function %<__builtin_next_arg%>");
12076 return true;
12077 }
12078 arg = CALL_EXPR_ARG (exp, 0);
12079 }
12080
12081 if (TREE_CODE (arg) == SSA_NAME)
12082 arg = SSA_NAME_VAR (arg);
12083
12084 /* We destructively modify the call to be __builtin_va_start (ap, 0)
12085 or __builtin_next_arg (0) the first time we see it, after checking
12086 the arguments and if needed issuing a warning. */
12087 if (!integer_zerop (arg))
12088 {
12089 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
12090
12091 /* Strip off all nops for the sake of the comparison. This
12092 is not quite the same as STRIP_NOPS. It does more.
12093 We must also strip off INDIRECT_EXPR for C++ reference
12094 parameters. */
12095 while (CONVERT_EXPR_P (arg)
12096 || TREE_CODE (arg) == INDIRECT_REF)
12097 arg = TREE_OPERAND (arg, 0);
12098 if (arg != last_parm)
12099 {
12100 /* FIXME: Sometimes the tree optimizers hand us an argument that is
12101 not the last one even though the user did use the last argument.
12102 We just warn and carry on, so wrong code may be generated because
12103 of it. */
12105 warning_at (current_location,
12106 OPT_Wvarargs,
12107 "second parameter of %<va_start%> not last named argument");
12108 }
12109
12110 /* Undefined by C99 7.15.1.4p4 (va_start):
12111 "If the parameter parmN is declared with the register storage
12112 class, with a function or array type, or with a type that is
12113 not compatible with the type that results after application of
12114 the default argument promotions, the behavior is undefined."
12115 */
12116 else if (DECL_REGISTER (arg))
12117 {
12118 warning_at (current_location,
12119 OPT_Wvarargs,
12120 "undefined behaviour when second parameter of "
12121 "%<va_start%> is declared with %<register%> storage");
12122 }
12123
12124 /* We want to verify the second parameter just once before the tree
12125 optimizers are run and then avoid keeping it in the tree,
12126 as otherwise we could warn even for correct code like:
12127 void foo (int i, ...)
12128 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
12129 if (va_start_p)
12130 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
12131 else
12132 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
12133 }
12134 return false;
12135 }
12136
12137
12138 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
12139 ORIG may be null if this is a 2-argument call. We don't attempt to
12140 simplify calls with more than 3 arguments.
12141
12142 Return NULL_TREE if no simplification was possible, otherwise return the
12143 simplified form of the call as a tree. If IGNORED is true, it means that
12144 the caller does not use the returned value of the function. */
12145
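/* For example (illustrative only; buf and src are hypothetical):

	sprintf (buf, "abcd")     -> strcpy (buf, "abcd"), value 4
	sprintf (buf, "%s", src)  -> strcpy (buf, src), with the value
				     taken from c_strlen (src) when that
				     is a constant

   A call such as sprintf (buf, "abc", ptr++) is left alone.  */
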
12146 static tree
12147 fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
12148 tree orig, int ignored)
12149 {
12150 tree call, retval;
12151 const char *fmt_str = NULL;
12152
12153 /* Verify the required arguments in the original call. We deal with two
12154 types of sprintf() calls: 'sprintf (str, fmt)' and
12155 'sprintf (dest, "%s", orig)'. */
12156 if (!validate_arg (dest, POINTER_TYPE)
12157 || !validate_arg (fmt, POINTER_TYPE))
12158 return NULL_TREE;
12159 if (orig && !validate_arg (orig, POINTER_TYPE))
12160 return NULL_TREE;
12161
12162 /* Check whether the format is a literal string constant. */
12163 fmt_str = c_getstr (fmt);
12164 if (fmt_str == NULL)
12165 return NULL_TREE;
12166
12167 call = NULL_TREE;
12168 retval = NULL_TREE;
12169
12170 if (!init_target_chars ())
12171 return NULL_TREE;
12172
12173 /* If the format doesn't contain % args or %%, use strcpy. */
12174 if (strchr (fmt_str, target_percent) == NULL)
12175 {
12176 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12177
12178 if (!fn)
12179 return NULL_TREE;
12180
12181 /* Don't optimize sprintf (buf, "abc", ptr++). */
12182 if (orig)
12183 return NULL_TREE;
12184
12185 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
12186 'format' is known to contain no % formats. */
12187 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
12188 if (!ignored)
12189 retval = build_int_cst (integer_type_node, strlen (fmt_str));
12190 }
12191
12192 /* If the format is "%s", use strcpy if the result isn't used. */
12193 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
12194 {
12195 tree fn;
12196 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12197
12198 if (!fn)
12199 return NULL_TREE;
12200
12201 /* Don't crash on sprintf (str1, "%s"). */
12202 if (!orig)
12203 return NULL_TREE;
12204
12205 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
12206 if (!ignored)
12207 {
12208 retval = c_strlen (orig, 1);
12209 if (!retval || TREE_CODE (retval) != INTEGER_CST)
12210 return NULL_TREE;
12211 }
12212 call = build_call_expr_loc (loc, fn, 2, dest, orig);
12213 }
12214
12215 if (call && retval)
12216 {
12217 retval = fold_convert_loc
12218 (loc, TREE_TYPE (TREE_TYPE (builtin_decl_implicit (BUILT_IN_SPRINTF))),
12219 retval);
12220 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
12221 }
12222 else
12223 return call;
12224 }
12225
12226 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
12227 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
12228 attempt to simplify calls with more than 4 arguments.
12229
12230 Return NULL_TREE if no simplification was possible, otherwise return the
12231 simplified form of the call as a tree. If IGNORED is true, it means that
12232 the caller does not use the returned value of the function. */
12233
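/* For example (illustrative only; buf and src are hypothetical):

	snprintf (buf, 8, "abc")      -> strcpy (buf, "abc"), value 3
	snprintf (buf, 8, "%s", src)  -> strcpy (buf, src), when
					 strlen (src) is known and < 8
	snprintf (buf, 3, "abc")      -> left alone (would truncate)  */
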
12234 static tree
12235 fold_builtin_snprintf (location_t loc, tree dest, tree destsize, tree fmt,
12236 tree orig, int ignored)
12237 {
12238 tree call, retval;
12239 const char *fmt_str = NULL;
12240 unsigned HOST_WIDE_INT destlen;
12241
12242 /* Verify the required arguments in the original call. We deal with two
12243 types of snprintf() calls: 'snprintf (str, cst, fmt)' and
12244 'snprintf (dest, cst, "%s", orig)'. */
12245 if (!validate_arg (dest, POINTER_TYPE)
12246 || !validate_arg (destsize, INTEGER_TYPE)
12247 || !validate_arg (fmt, POINTER_TYPE))
12248 return NULL_TREE;
12249 if (orig && !validate_arg (orig, POINTER_TYPE))
12250 return NULL_TREE;
12251
12252 if (!tree_fits_uhwi_p (destsize))
12253 return NULL_TREE;
12254
12255 /* Check whether the format is a literal string constant. */
12256 fmt_str = c_getstr (fmt);
12257 if (fmt_str == NULL)
12258 return NULL_TREE;
12259
12260 call = NULL_TREE;
12261 retval = NULL_TREE;
12262
12263 if (!init_target_chars ())
12264 return NULL_TREE;
12265
12266 destlen = tree_to_uhwi (destsize);
12267
12268 /* If the format doesn't contain % args or %%, use strcpy. */
12269 if (strchr (fmt_str, target_percent) == NULL)
12270 {
12271 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12272 size_t len = strlen (fmt_str);
12273
12274 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
12275 if (orig)
12276 return NULL_TREE;
12277
12278 /* We could expand this as
12279 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
12280 or to
12281 memcpy (str, fmt_with_nul_at_cstm1, cst);
12282 but in the former case that might increase code size
12283 and in the latter case grow .rodata section too much.
12284 So punt for now. */
12285 if (len >= destlen)
12286 return NULL_TREE;
12287
12288 if (!fn)
12289 return NULL_TREE;
12290
12291 /* Convert snprintf (str, cst, fmt) into strcpy (str, fmt) when
12292 'format' is known to contain no % formats and
12293 strlen (fmt) < cst. */
12294 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
12295
12296 if (!ignored)
12297 retval = build_int_cst (integer_type_node, strlen (fmt_str));
12298 }
12299
12300 /* If the format is "%s", use strcpy if the result isn't used. */
12301 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
12302 {
12303 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12304 unsigned HOST_WIDE_INT origlen;
12305
12306 /* Don't crash on snprintf (str1, cst, "%s"). */
12307 if (!orig)
12308 return NULL_TREE;
12309
12310 retval = c_strlen (orig, 1);
12311 if (!retval || !tree_fits_uhwi_p (retval))
12312 return NULL_TREE;
12313
12314 origlen = tree_to_uhwi (retval);
12315 /* We could expand this as
12316 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
12317 or to
12318 memcpy (str1, str2_with_nul_at_cstm1, cst);
12319 but in the former case that might increase code size
12320 and in the latter case grow .rodata section too much.
12321 So punt for now. */
12322 if (origlen >= destlen)
12323 return NULL_TREE;
12324
12325 /* Convert snprintf (str1, cst, "%s", str2) into
12326 strcpy (str1, str2) if strlen (str2) < cst. */
12327 if (!fn)
12328 return NULL_TREE;
12329
12330 call = build_call_expr_loc (loc, fn, 2, dest, orig);
12331
12332 if (ignored)
12333 retval = NULL_TREE;
12334 }
12335
12336 if (call && retval)
12337 {
12338 tree fn = builtin_decl_explicit (BUILT_IN_SNPRINTF);
12339 retval = fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fn)), retval);
12340 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
12341 }
12342 else
12343 return call;
12344 }
12345
12346 /* Expand a call EXP to __builtin_object_size. */
12347
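/* Illustrative user-level view (buf and p are hypothetical; this
   expander only runs when the objsz pass could not fold the call):

	char buf[64];
	... __builtin_object_size (buf, 0) ...   // usually folded to 64
	... __builtin_object_size (p, 2) ...     // 0 when nothing known

   The fallback below returns (size_t) -1 for types 0 and 1 and
   (size_t) 0 for types 2 and 3.  */
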
12348 rtx
12349 expand_builtin_object_size (tree exp)
12350 {
12351 tree ost;
12352 int object_size_type;
12353 tree fndecl = get_callee_fndecl (exp);
12354
12355 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
12356 {
12357 error ("%Kfirst argument of %D must be a pointer, second integer constant",
12358 exp, fndecl);
12359 expand_builtin_trap ();
12360 return const0_rtx;
12361 }
12362
12363 ost = CALL_EXPR_ARG (exp, 1);
12364 STRIP_NOPS (ost);
12365
12366 if (TREE_CODE (ost) != INTEGER_CST
12367 || tree_int_cst_sgn (ost) < 0
12368 || compare_tree_int (ost, 3) > 0)
12369 {
12370 error ("%Klast argument of %D is not integer constant between 0 and 3",
12371 exp, fndecl);
12372 expand_builtin_trap ();
12373 return const0_rtx;
12374 }
12375
12376 object_size_type = tree_to_shwi (ost);
12377
12378 return object_size_type < 2 ? constm1_rtx : const0_rtx;
12379 }
12380
12381 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12382 FCODE is the BUILT_IN_* to use.
12383 Return NULL_RTX if we failed; the caller should emit a normal call,
12384 otherwise try to get the result in TARGET, if convenient (and in
12385 mode MODE if that's convenient). */
12386
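/* For example (illustrative; d and s are hypothetical):

	__memcpy_chk (d, s, 16, 32)         -> memcpy (d, s, 16)
	__memcpy_chk (d, s, 64, 32)         -> warn "will always overflow"
					       and keep the library call
	__memcpy_chk (d, s, n, (size_t) -1) -> memcpy (d, s, n)
					       (object size unknown)  */
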
12387 static rtx
12388 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
12389 enum built_in_function fcode)
12390 {
12391 tree dest, src, len, size;
12392
12393 if (!validate_arglist (exp,
12394 POINTER_TYPE,
12395 fcode == BUILT_IN_MEMSET_CHK
12396 ? INTEGER_TYPE : POINTER_TYPE,
12397 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
12398 return NULL_RTX;
12399
12400 dest = CALL_EXPR_ARG (exp, 0);
12401 src = CALL_EXPR_ARG (exp, 1);
12402 len = CALL_EXPR_ARG (exp, 2);
12403 size = CALL_EXPR_ARG (exp, 3);
12404
12405 if (! tree_fits_uhwi_p (size))
12406 return NULL_RTX;
12407
12408 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
12409 {
12410 tree fn;
12411
12412 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
12413 {
12414 warning_at (tree_nonartificial_location (exp),
12415 0, "%Kcall to %D will always overflow destination buffer",
12416 exp, get_callee_fndecl (exp));
12417 return NULL_RTX;
12418 }
12419
12420 fn = NULL_TREE;
12421 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12422 mem{cpy,pcpy,move,set} is available. */
12423 switch (fcode)
12424 {
12425 case BUILT_IN_MEMCPY_CHK:
12426 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
12427 break;
12428 case BUILT_IN_MEMPCPY_CHK:
12429 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
12430 break;
12431 case BUILT_IN_MEMMOVE_CHK:
12432 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
12433 break;
12434 case BUILT_IN_MEMSET_CHK:
12435 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
12436 break;
12437 default:
12438 break;
12439 }
12440
12441 if (! fn)
12442 return NULL_RTX;
12443
12444 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
12445 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
12446 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12447 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12448 }
12449 else if (fcode == BUILT_IN_MEMSET_CHK)
12450 return NULL_RTX;
12451 else
12452 {
12453 unsigned int dest_align = get_pointer_alignment (dest);
12454
12455 /* If DEST is not a pointer type, call the normal function. */
12456 if (dest_align == 0)
12457 return NULL_RTX;
12458
12459 /* If SRC and DEST are the same (and not volatile), do nothing. */
12460 if (operand_equal_p (src, dest, 0))
12461 {
12462 tree expr;
12463
12464 if (fcode != BUILT_IN_MEMPCPY_CHK)
12465 {
12466 /* Evaluate and ignore LEN in case it has side-effects. */
12467 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
12468 return expand_expr (dest, target, mode, EXPAND_NORMAL);
12469 }
12470
12471 expr = fold_build_pointer_plus (dest, len);
12472 return expand_expr (expr, target, mode, EXPAND_NORMAL);
12473 }
12474
12475 /* __memmove_chk special case. */
12476 if (fcode == BUILT_IN_MEMMOVE_CHK)
12477 {
12478 unsigned int src_align = get_pointer_alignment (src);
12479
12480 if (src_align == 0)
12481 return NULL_RTX;
12482
12483 /* If src is categorized for a readonly section we can use
12484 normal __memcpy_chk. */
12485 if (readonly_data_expr (src))
12486 {
12487 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12488 if (!fn)
12489 return NULL_RTX;
12490 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
12491 dest, src, len, size);
12492 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
12493 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12494 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12495 }
12496 }
12497 return NULL_RTX;
12498 }
12499 }
12500
12501 /* Emit warning if a buffer overflow is detected at compile time. */
12502
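/* For example (illustrative; with _FORTIFY_SOURCE the front end has
   already rewritten strcpy into __strcpy_chk):

	char buf[4];
	strcpy (buf, "abcdef");   // __strcpy_chk (buf, "abcdef", 4)

   strlen ("abcdef") == 6 >= 4, so this warns that the call will
   always overflow the destination buffer.  */
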
12503 static void
12504 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
12505 {
12506 int is_strlen = 0;
12507 tree len, size;
12508 location_t loc = tree_nonartificial_location (exp);
12509
12510 switch (fcode)
12511 {
12512 case BUILT_IN_STRCPY_CHK:
12513 case BUILT_IN_STPCPY_CHK:
12514 /* For __strcat_chk the warning will be emitted only if overflowing
12515 by at least strlen (dest) + 1 bytes. */
12516 case BUILT_IN_STRCAT_CHK:
12517 len = CALL_EXPR_ARG (exp, 1);
12518 size = CALL_EXPR_ARG (exp, 2);
12519 is_strlen = 1;
12520 break;
12521 case BUILT_IN_STRNCAT_CHK:
12522 case BUILT_IN_STRNCPY_CHK:
12523 case BUILT_IN_STPNCPY_CHK:
12524 len = CALL_EXPR_ARG (exp, 2);
12525 size = CALL_EXPR_ARG (exp, 3);
12526 break;
12527 case BUILT_IN_SNPRINTF_CHK:
12528 case BUILT_IN_VSNPRINTF_CHK:
12529 len = CALL_EXPR_ARG (exp, 1);
12530 size = CALL_EXPR_ARG (exp, 3);
12531 break;
12532 default:
12533 gcc_unreachable ();
12534 }
12535
12536 if (!len || !size)
12537 return;
12538
12539 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
12540 return;
12541
12542 if (is_strlen)
12543 {
12544 len = c_strlen (len, 1);
12545 if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
12546 return;
12547 }
12548 else if (fcode == BUILT_IN_STRNCAT_CHK)
12549 {
12550 tree src = CALL_EXPR_ARG (exp, 1);
12551 if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
12552 return;
12553 src = c_strlen (src, 1);
12554 if (! src || ! tree_fits_uhwi_p (src))
12555 {
12556 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
12557 exp, get_callee_fndecl (exp));
12558 return;
12559 }
12560 else if (tree_int_cst_lt (src, size))
12561 return;
12562 }
12563 else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
12564 return;
12565
12566 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
12567 exp, get_callee_fndecl (exp));
12568 }
12569
12570 /* Emit warning if a buffer overflow is detected at compile time
12571 in __sprintf_chk/__vsprintf_chk calls. */
12572
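/* For example (illustrative; buf is hypothetical):

	char buf[4];
	__sprintf_chk (buf, 1, 4, "hello");   // len 5 >= size 4: warn

   With a "%s" format the same comparison is done against the strlen
   of a string-literal argument, when there is one.  */
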
12573 static void
12574 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
12575 {
12576 tree size, len, fmt;
12577 const char *fmt_str;
12578 int nargs = call_expr_nargs (exp);
12579
12580 /* Verify the required arguments in the original call. */
12581
12582 if (nargs < 4)
12583 return;
12584 size = CALL_EXPR_ARG (exp, 2);
12585 fmt = CALL_EXPR_ARG (exp, 3);
12586
12587 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
12588 return;
12589
12590 /* Check whether the format is a literal string constant. */
12591 fmt_str = c_getstr (fmt);
12592 if (fmt_str == NULL)
12593 return;
12594
12595 if (!init_target_chars ())
12596 return;
12597
12598 /* If the format doesn't contain % args or %%, we know its size. */
12599 if (strchr (fmt_str, target_percent) == 0)
12600 len = build_int_cstu (size_type_node, strlen (fmt_str));
12601 /* If the format is "%s" and the first ... argument is a string literal,
12602 we know it too. */
12603 else if (fcode == BUILT_IN_SPRINTF_CHK
12604 && strcmp (fmt_str, target_percent_s) == 0)
12605 {
12606 tree arg;
12607
12608 if (nargs < 5)
12609 return;
12610 arg = CALL_EXPR_ARG (exp, 4);
12611 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
12612 return;
12613
12614 len = c_strlen (arg, 1);
12615 if (!len || ! tree_fits_uhwi_p (len))
12616 return;
12617 }
12618 else
12619 return;
12620
12621 if (! tree_int_cst_lt (len, size))
12622 warning_at (tree_nonartificial_location (exp),
12623 0, "%Kcall to %D will always overflow destination buffer",
12624 exp, get_callee_fndecl (exp));
12625 }
12626
12627 /* Emit a warning if free is called with the address of a variable. */
12628
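/* For example (illustrative):

	int x;
	free (&x);    // -> "attempt to free a non-heap object 'x'"

   Only ADDR_EXPRs of declared objects are flagged; pointers loaded
   from memory are left alone.  */
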
12629 static void
12630 maybe_emit_free_warning (tree exp)
12631 {
12632 tree arg = CALL_EXPR_ARG (exp, 0);
12633
12634 STRIP_NOPS (arg);
12635 if (TREE_CODE (arg) != ADDR_EXPR)
12636 return;
12637
12638 arg = get_base_address (TREE_OPERAND (arg, 0));
12639 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
12640 return;
12641
12642 if (SSA_VAR_P (arg))
12643 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
12644 "%Kattempt to free a non-heap object %qD", exp, arg);
12645 else
12646 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
12647 "%Kattempt to free a non-heap object", exp);
12648 }
12649
12650 /* Fold a call to __builtin_object_size with arguments PTR and OST,
12651 if possible. */
12652
12653 tree
12654 fold_builtin_object_size (tree ptr, tree ost)
12655 {
12656 unsigned HOST_WIDE_INT bytes;
12657 int object_size_type;
12658
12659 if (!validate_arg (ptr, POINTER_TYPE)
12660 || !validate_arg (ost, INTEGER_TYPE))
12661 return NULL_TREE;
12662
12663 STRIP_NOPS (ost);
12664
12665 if (TREE_CODE (ost) != INTEGER_CST
12666 || tree_int_cst_sgn (ost) < 0
12667 || compare_tree_int (ost, 3) > 0)
12668 return NULL_TREE;
12669
12670 object_size_type = tree_to_shwi (ost);
12671
12672 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
12673 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
12674 and (size_t) 0 for types 2 and 3. */
12675 if (TREE_SIDE_EFFECTS (ptr))
12676 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
12677
12678 if (TREE_CODE (ptr) == ADDR_EXPR)
12679 {
12680 bytes = compute_builtin_object_size (ptr, object_size_type);
12681 if (double_int_fits_to_tree_p (size_type_node,
12682 double_int::from_uhwi (bytes)))
12683 return build_int_cstu (size_type_node, bytes);
12684 }
12685 else if (TREE_CODE (ptr) == SSA_NAME)
12686 {
12687 /* If object size is not known yet, delay folding until
12688 later. Maybe subsequent passes will help determining
12689 it. */
12690 bytes = compute_builtin_object_size (ptr, object_size_type);
12691 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
12692 && double_int_fits_to_tree_p (size_type_node,
12693 double_int::from_uhwi (bytes)))
12694 return build_int_cstu (size_type_node, bytes);
12695 }
12696
12697 return NULL_TREE;
12698 }
12699
12700 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12701 DEST, SRC, LEN, and SIZE are the arguments to the call.
12702 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
12703 code of the builtin. If MAXLEN is not NULL, it is maximum length
12704 passed as third argument. */
12705
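/* For example (illustrative; d and s are hypothetical):

	__memcpy_chk (d, s, 16, 32)    -> memcpy (d, s, 16)
	__memcpy_chk (d, d, n, os)     -> d     (same source and dest)
	__mempcpy_chk (d, d, n, os)    -> d + n

   When LEN might exceed SIZE the call is kept so the run-time check
   still happens.  */
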
12706 tree
12707 fold_builtin_memory_chk (location_t loc, tree fndecl,
12708 tree dest, tree src, tree len, tree size,
12709 tree maxlen, bool ignore,
12710 enum built_in_function fcode)
12711 {
12712 tree fn;
12713
12714 if (!validate_arg (dest, POINTER_TYPE)
12715 || !validate_arg (src,
12716 (fcode == BUILT_IN_MEMSET_CHK
12717 ? INTEGER_TYPE : POINTER_TYPE))
12718 || !validate_arg (len, INTEGER_TYPE)
12719 || !validate_arg (size, INTEGER_TYPE))
12720 return NULL_TREE;
12721
12722 /* If SRC and DEST are the same (and not volatile), return DEST
12723 (resp. DEST+LEN for __mempcpy_chk). */
12724 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
12725 {
12726 if (fcode != BUILT_IN_MEMPCPY_CHK)
12727 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12728 dest, len);
12729 else
12730 {
12731 tree temp = fold_build_pointer_plus_loc (loc, dest, len);
12732 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
12733 }
12734 }
12735
12736 if (! tree_fits_uhwi_p (size))
12737 return NULL_TREE;
12738
12739 if (! integer_all_onesp (size))
12740 {
12741 if (! tree_fits_uhwi_p (len))
12742 {
12743 /* If LEN is not constant, try MAXLEN too.
12744 For MAXLEN only allow optimizing into non-_ocs function
12745 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12746 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
12747 {
12748 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
12749 {
12750 /* (void) __mempcpy_chk () can be optimized into
12751 (void) __memcpy_chk (). */
12752 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12753 if (!fn)
12754 return NULL_TREE;
12755
12756 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
12757 }
12758 return NULL_TREE;
12759 }
12760 }
12761 else
12762 maxlen = len;
12763
12764 if (tree_int_cst_lt (size, maxlen))
12765 return NULL_TREE;
12766 }
12767
12768 fn = NULL_TREE;
12769 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12770 mem{cpy,pcpy,move,set} is available. */
12771 switch (fcode)
12772 {
12773 case BUILT_IN_MEMCPY_CHK:
12774 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
12775 break;
12776 case BUILT_IN_MEMPCPY_CHK:
12777 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
12778 break;
12779 case BUILT_IN_MEMMOVE_CHK:
12780 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
12781 break;
12782 case BUILT_IN_MEMSET_CHK:
12783 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
12784 break;
12785 default:
12786 break;
12787 }
12788
12789 if (!fn)
12790 return NULL_TREE;
12791
12792 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12793 }
12794
12795 /* Fold a call to the __st[rp]cpy_chk builtin.
12796 DEST, SRC, and SIZE are the arguments to the call.
12797 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12798 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12799 strings passed as second argument. */
12800
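/* For example (illustrative; d and s are hypothetical):

	__strcpy_chk (d, "abc", 8)   -> strcpy (d, "abc")    (3 < 8)
	__strcpy_chk (d, s, os)      -> __memcpy_chk (d, s, len + 1, os)
					when c_strlen (s) yields a
					non-constant length LEN
	(void) __stpcpy_chk (d, s, os) -> __strcpy_chk (d, s, os)
					when the length is unknown  */
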
12801 tree
12802 fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
12803 tree src, tree size,
12804 tree maxlen, bool ignore,
12805 enum built_in_function fcode)
12806 {
12807 tree len, fn;
12808
12809 if (!validate_arg (dest, POINTER_TYPE)
12810 || !validate_arg (src, POINTER_TYPE)
12811 || !validate_arg (size, INTEGER_TYPE))
12812 return NULL_TREE;
12813
12814 /* If SRC and DEST are the same (and not volatile), return DEST. */
12815 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12816 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
12817
12818 if (! tree_fits_uhwi_p (size))
12819 return NULL_TREE;
12820
12821 if (! integer_all_onesp (size))
12822 {
12823 len = c_strlen (src, 1);
12824 if (! len || ! tree_fits_uhwi_p (len))
12825 {
12826 /* If LEN is not constant, try MAXLEN too.
12827 For MAXLEN only allow optimizing into non-_ocs function
12828 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12829 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
12830 {
12831 if (fcode == BUILT_IN_STPCPY_CHK)
12832 {
12833 if (! ignore)
12834 return NULL_TREE;
12835
12836 /* If return value of __stpcpy_chk is ignored,
12837 optimize into __strcpy_chk. */
12838 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
12839 if (!fn)
12840 return NULL_TREE;
12841
12842 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12843 }
12844
12845 if (! len || TREE_SIDE_EFFECTS (len))
12846 return NULL_TREE;
12847
12848 /* If c_strlen returned something, but not a constant,
12849 transform __strcpy_chk into __memcpy_chk. */
12850 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12851 if (!fn)
12852 return NULL_TREE;
12853
12854 len = fold_convert_loc (loc, size_type_node, len);
12855 len = size_binop_loc (loc, PLUS_EXPR, len,
12856 build_int_cst (size_type_node, 1));
12857 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12858 build_call_expr_loc (loc, fn, 4,
12859 dest, src, len, size));
12860 }
12861 }
12862 else
12863 maxlen = len;
12864
12865 if (! tree_int_cst_lt (maxlen, size))
12866 return NULL_TREE;
12867 }
12868
12869 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12870 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
12871 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
12872 if (!fn)
12873 return NULL_TREE;
12874
12875 return build_call_expr_loc (loc, fn, 2, dest, src);
12876 }
12877
12878 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
12879 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12880 length passed as third argument. IGNORE is true if return value can be
12881 ignored. FCODE is the BUILT_IN_* code of the builtin. */
12882
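/* For example (illustrative; d and s are hypothetical):

	__strncpy_chk (d, s, 8, 16)        -> strncpy (d, s, 8)
	__strncpy_chk (d, s, n, 16)        -> kept (n unknown)
	(void) __stpncpy_chk (d, s, n, os) -> __strncpy_chk (d, s, n, os)  */
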
12883 tree
12884 fold_builtin_stxncpy_chk (location_t loc, tree dest, tree src,
12885 tree len, tree size, tree maxlen, bool ignore,
12886 enum built_in_function fcode)
12887 {
12888 tree fn;
12889
12890 if (!validate_arg (dest, POINTER_TYPE)
12891 || !validate_arg (src, POINTER_TYPE)
12892 || !validate_arg (len, INTEGER_TYPE)
12893 || !validate_arg (size, INTEGER_TYPE))
12894 return NULL_TREE;
12895
12896 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
12897 {
12898 /* If return value of __stpncpy_chk is ignored,
12899 optimize into __strncpy_chk. */
12900 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
12901 if (fn)
12902 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
12903 }
12904
12905 if (! tree_fits_uhwi_p (size))
12906 return NULL_TREE;
12907
12908 if (! integer_all_onesp (size))
12909 {
12910 if (! tree_fits_uhwi_p (len))
12911 {
12912 /* If LEN is not constant, try MAXLEN too.
12913 For MAXLEN only allow optimizing into non-_ocs function
12914 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12915 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
12916 return NULL_TREE;
12917 }
12918 else
12919 maxlen = len;
12920
12921 if (tree_int_cst_lt (size, maxlen))
12922 return NULL_TREE;
12923 }
12924
12925 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
12926 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
12927 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
12928 if (!fn)
12929 return NULL_TREE;
12930
12931 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12932 }
12933
12934 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12935 are the arguments to the call. */
12936
12937 static tree
12938 fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
12939 tree src, tree size)
12940 {
12941 tree fn;
12942 const char *p;
12943
12944 if (!validate_arg (dest, POINTER_TYPE)
12945 || !validate_arg (src, POINTER_TYPE)
12946 || !validate_arg (size, INTEGER_TYPE))
12947 return NULL_TREE;
12948
12949 p = c_getstr (src);
12950 /* If the SRC parameter is "", return DEST. */
12951 if (p && *p == '\0')
12952 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12953
12954 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
12955 return NULL_TREE;
12956
12957 /* If __builtin_strcat_chk is used, assume strcat is available. */
12958 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
12959 if (!fn)
12960 return NULL_TREE;
12961
12962 return build_call_expr_loc (loc, fn, 2, dest, src);
12963 }
12964
12965 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12966 LEN, and SIZE. */
12967
12968 static tree
12969 fold_builtin_strncat_chk (location_t loc, tree fndecl,
12970 tree dest, tree src, tree len, tree size)
12971 {
12972 tree fn;
12973 const char *p;
12974
12975 if (!validate_arg (dest, POINTER_TYPE)
12976 || !validate_arg (src, POINTER_TYPE)
12977 || !validate_arg (len, INTEGER_TYPE)
12978 || !validate_arg (size, INTEGER_TYPE))
12979 return NULL_TREE;
12980
12981 p = c_getstr (src);
12982 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12983 if (p && *p == '\0')
12984 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12985 else if (integer_zerop (len))
12986 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12987
12988 if (! tree_fits_uhwi_p (size))
12989 return NULL_TREE;
12990
12991 if (! integer_all_onesp (size))
12992 {
12993 tree src_len = c_strlen (src, 1);
12994 if (src_len
12995 && tree_fits_uhwi_p (src_len)
12996 && tree_fits_uhwi_p (len)
12997 && ! tree_int_cst_lt (len, src_len))
12998 {
12999 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
13000 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
13001 if (!fn)
13002 return NULL_TREE;
13003
13004 return build_call_expr_loc (loc, fn, 3, dest, src, size);
13005 }
13006 return NULL_TREE;
13007 }
13008
13009 /* If __builtin_strncat_chk is used, assume strncat is available. */
13010 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
13011 if (!fn)
13012 return NULL_TREE;
13013
13014 return build_call_expr_loc (loc, fn, 3, dest, src, len);
13015 }
13016
13017 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
13018 Return NULL_TREE if a normal call should be emitted rather than
13019 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
13020 or BUILT_IN_VSPRINTF_CHK. */
13021
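/* For example (illustrative; d is hypothetical):

	__sprintf_chk (d, 0, (size_t) -1, "hello") -> sprintf (d, "hello")
	__sprintf_chk (d, 1, 8, "hello")           -> sprintf (d, "hello")
						      (5 < 8)
	__sprintf_chk (d, 1, os, "%d", i)          -> kept: flag nonzero
						      and % directives  */
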
13022 static tree
13023 fold_builtin_sprintf_chk_1 (location_t loc, int nargs, tree *args,
13024 enum built_in_function fcode)
13025 {
13026 tree dest, size, len, fn, fmt, flag;
13027 const char *fmt_str;
13028
13029 /* Verify the required arguments in the original call. */
13030 if (nargs < 4)
13031 return NULL_TREE;
13032 dest = args[0];
13033 if (!validate_arg (dest, POINTER_TYPE))
13034 return NULL_TREE;
13035 flag = args[1];
13036 if (!validate_arg (flag, INTEGER_TYPE))
13037 return NULL_TREE;
13038 size = args[2];
13039 if (!validate_arg (size, INTEGER_TYPE))
13040 return NULL_TREE;
13041 fmt = args[3];
13042 if (!validate_arg (fmt, POINTER_TYPE))
13043 return NULL_TREE;
13044
13045 if (! tree_fits_uhwi_p (size))
13046 return NULL_TREE;
13047
13048 len = NULL_TREE;
13049
13050 if (!init_target_chars ())
13051 return NULL_TREE;
13052
13053 /* Check whether the format is a literal string constant. */
13054 fmt_str = c_getstr (fmt);
13055 if (fmt_str != NULL)
13056 {
13057 /* If the format doesn't contain % args or %%, we know the size. */
13058 if (strchr (fmt_str, target_percent) == 0)
13059 {
13060 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
13061 len = build_int_cstu (size_type_node, strlen (fmt_str));
13062 }
13063 /* If the format is "%s" and the first ... argument is a string literal,
13064 we know the size too. */
13065 else if (fcode == BUILT_IN_SPRINTF_CHK
13066 && strcmp (fmt_str, target_percent_s) == 0)
13067 {
13068 tree arg;
13069
13070 if (nargs == 5)
13071 {
13072 arg = args[4];
13073 if (validate_arg (arg, POINTER_TYPE))
13074 {
13075 len = c_strlen (arg, 1);
13076 if (! len || ! tree_fits_uhwi_p (len))
13077 len = NULL_TREE;
13078 }
13079 }
13080 }
13081 }
13082
13083 if (! integer_all_onesp (size))
13084 {
13085 if (! len || ! tree_int_cst_lt (len, size))
13086 return NULL_TREE;
13087 }
13088
13089 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
13090 or if format doesn't contain % chars or is "%s". */
13091 if (! integer_zerop (flag))
13092 {
13093 if (fmt_str == NULL)
13094 return NULL_TREE;
13095 if (strchr (fmt_str, target_percent) != NULL
13096 && strcmp (fmt_str, target_percent_s))
13097 return NULL_TREE;
13098 }
13099
13100 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
13101 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
13102 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
13103 if (!fn)
13104 return NULL_TREE;
13105
13106 return rewrite_call_expr_array (loc, nargs, args, 4, fn, 2, dest, fmt);
13107 }
13108
13109 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
13110 a normal call should be emitted rather than expanding the function
13111 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13112
13113 static tree
13114 fold_builtin_sprintf_chk (location_t loc, tree exp,
13115 enum built_in_function fcode)
13116 {
13117 return fold_builtin_sprintf_chk_1 (loc, call_expr_nargs (exp),
13118 CALL_EXPR_ARGP (exp), fcode);
13119 }
13120
13121 /* Fold a call EXP to __{,v}snprintf_chk having NARGS passed as ARGS. Return
13122 NULL_TREE if a normal call should be emitted rather than expanding
13123 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13124 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13125 passed as second argument. */
13126
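/* For example (illustrative; d is hypothetical):

	__snprintf_chk (d, 8, 0, 16, "abc")  -> snprintf (d, 8, "abc")
	__snprintf_chk (d, n, 0, 16, "abc")  -> kept (n might exceed 16)  */
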
13127 static tree
13128 fold_builtin_snprintf_chk_1 (location_t loc, int nargs, tree *args,
13129 tree maxlen, enum built_in_function fcode)
13130 {
13131 tree dest, size, len, fn, fmt, flag;
13132 const char *fmt_str;
13133
13134 /* Verify the required arguments in the original call. */
13135 if (nargs < 5)
13136 return NULL_TREE;
13137 dest = args[0];
13138 if (!validate_arg (dest, POINTER_TYPE))
13139 return NULL_TREE;
13140 len = args[1];
13141 if (!validate_arg (len, INTEGER_TYPE))
13142 return NULL_TREE;
13143 flag = args[2];
13144 if (!validate_arg (flag, INTEGER_TYPE))
13145 return NULL_TREE;
13146 size = args[3];
13147 if (!validate_arg (size, INTEGER_TYPE))
13148 return NULL_TREE;
13149 fmt = args[4];
13150 if (!validate_arg (fmt, POINTER_TYPE))
13151 return NULL_TREE;
13152
13153 if (! tree_fits_uhwi_p (size))
13154 return NULL_TREE;
13155
13156 if (! integer_all_onesp (size))
13157 {
13158 if (! tree_fits_uhwi_p (len))
13159 {
13160 /* If LEN is not constant, try MAXLEN too.
13161 For MAXLEN only allow optimizing into non-_ocs function
13162 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13163 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
13164 return NULL_TREE;
13165 }
13166 else
13167 maxlen = len;
13168
13169 if (tree_int_cst_lt (size, maxlen))
13170 return NULL_TREE;
13171 }
13172
13173 if (!init_target_chars ())
13174 return NULL_TREE;
13175
13176 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13177 or if format doesn't contain % chars or is "%s". */
13178 if (! integer_zerop (flag))
13179 {
13180 fmt_str = c_getstr (fmt);
13181 if (fmt_str == NULL)
13182 return NULL_TREE;
13183 if (strchr (fmt_str, target_percent) != NULL
13184 && strcmp (fmt_str, target_percent_s))
13185 return NULL_TREE;
13186 }
13187
13188 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13189 available. */
13190 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
13191 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
13192 if (!fn)
13193 return NULL_TREE;
13194
13195 return rewrite_call_expr_array (loc, nargs, args, 5, fn, 3, dest, len, fmt);
13196 }
13197
13198 /* Fold a call EXP to __{,v}snprintf_chk. Return NULL_TREE if
13199 a normal call should be emitted rather than expanding the function
13200 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13201 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13202 passed as second argument. */
13203
13204 static tree
13205 fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
13206 enum built_in_function fcode)
13207 {
13208 return fold_builtin_snprintf_chk_1 (loc, call_expr_nargs (exp),
13209 CALL_EXPR_ARGP (exp), maxlen, fcode);
13210 }
13211
13212 /* Builtins with folding operations that operate on "..." arguments
13213 need special handling; we need to store the arguments in a convenient
13214 data structure before attempting any folding. Fortunately there are
13215 only a few builtins that fall into this category. FNDECL is the
13216 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
13217 result of the function call is ignored. */
13218
13219 static tree
13220 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
13221 bool ignore ATTRIBUTE_UNUSED)
13222 {
13223 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
13224 tree ret = NULL_TREE;
13225
13226 switch (fcode)
13227 {
13228 case BUILT_IN_SPRINTF_CHK:
13229 case BUILT_IN_VSPRINTF_CHK:
13230 ret = fold_builtin_sprintf_chk (loc, exp, fcode);
13231 break;
13232
13233 case BUILT_IN_SNPRINTF_CHK:
13234 case BUILT_IN_VSNPRINTF_CHK:
13235 ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
13236 break;
13237
13238 case BUILT_IN_FPCLASSIFY:
13239 ret = fold_builtin_fpclassify (loc, exp);
13240 break;
13241
13242 default:
13243 break;
13244 }
13245 if (ret)
13246 {
13247 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
13248 SET_EXPR_LOCATION (ret, loc);
13249 TREE_NO_WARNING (ret) = 1;
13250 return ret;
13251 }
13252 return NULL_TREE;
13253 }
13254
13255 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
13256 FMT and ARG are the arguments to the call; we don't fold cases with
13257 more than 2 arguments, and ARG may be null if this is a 1-argument case.
13258
13259 Return NULL_TREE if no simplification was possible, otherwise return the
13260 simplified form of the call as a tree. FCODE is the BUILT_IN_*
13261 code of the function to be simplified. */
13262
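/* For example (illustrative; all assume the return value is unused):

	printf ("")         -> folded away
	printf ("x")        -> putchar ('x')
	printf ("abc\n")    -> puts ("abc")
	printf ("%s\n", s)  -> puts (s)
	printf ("%c", c)    -> putchar (c)

   printf ("abc") with no trailing newline is kept: fputs would need
   stdout, which is not available here.  */
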
13263 static tree
13264 fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
13265 tree arg, bool ignore,
13266 enum built_in_function fcode)
13267 {
13268 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
13269 const char *fmt_str = NULL;
13270
13271 /* If the return value is used, don't do the transformation. */
13272 if (! ignore)
13273 return NULL_TREE;
13274
13275 /* Verify the required arguments in the original call. */
13276 if (!validate_arg (fmt, POINTER_TYPE))
13277 return NULL_TREE;
13278
13279 /* Check whether the format is a literal string constant. */
13280 fmt_str = c_getstr (fmt);
13281 if (fmt_str == NULL)
13282 return NULL_TREE;
13283
13284 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
13285 {
13286 /* If we're using an unlocked function, assume the other
13287 unlocked functions exist explicitly. */
13288 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
13289 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
13290 }
13291 else
13292 {
13293 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
13294 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
13295 }
13296
13297 if (!init_target_chars ())
13298 return NULL_TREE;
13299
13300 if (strcmp (fmt_str, target_percent_s) == 0
13301 || strchr (fmt_str, target_percent) == NULL)
13302 {
13303 const char *str;
13304
13305 if (strcmp (fmt_str, target_percent_s) == 0)
13306 {
13307 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
13308 return NULL_TREE;
13309
13310 if (!arg || !validate_arg (arg, POINTER_TYPE))
13311 return NULL_TREE;
13312
13313 str = c_getstr (arg);
13314 if (str == NULL)
13315 return NULL_TREE;
13316 }
13317 else
13318 {
13319 /* The format specifier doesn't contain any '%' characters. */
13320 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
13321 && arg)
13322 return NULL_TREE;
13323 str = fmt_str;
13324 }
13325
13326 /* If the string was "", printf does nothing. */
13327 if (str[0] == '\0')
13328 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
13329
13330 /* If the string has length of 1, call putchar. */
13331 if (str[1] == '\0')
13332 {
13333 /* Given printf("c"), (where c is any one character,)
13334 convert "c"[0] to an int and pass that to the replacement
13335 function. */
13336 newarg = build_int_cst (integer_type_node, str[0]);
13337 if (fn_putchar)
13338 call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
13339 }
13340 else
13341 {
13342 /* If the string was "string\n", call puts("string"). */
13343 size_t len = strlen (str);
13344 if ((unsigned char)str[len - 1] == target_newline
13345 && (size_t) (int) len == len
13346 && (int) len > 0)
13347 {
13348 char *newstr;
13349 tree offset_node, string_cst;
13350
13351 /* Create a NUL-terminated string that's one char shorter
13352 than the original, stripping off the trailing '\n'. */
13353 newarg = build_string_literal (len, str);
13354 string_cst = string_constant (newarg, &offset_node);
13355 gcc_checking_assert (string_cst
13356 && (TREE_STRING_LENGTH (string_cst)
13357 == (int) len)
13358 && integer_zerop (offset_node)
13359 && (unsigned char)
13360 TREE_STRING_POINTER (string_cst)[len - 1]
13361 == target_newline);
13362 /* build_string_literal creates a new STRING_CST,
13363 modify it in place to avoid double copying. */
13364 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
13365 newstr[len - 1] = '\0';
13366 if (fn_puts)
13367 call = build_call_expr_loc (loc, fn_puts, 1, newarg);
13368 }
13369 else
13370 /* We'd like to arrange to call fputs(string,stdout) here,
13371 but we need stdout and don't have a way to get it yet. */
13372 return NULL_TREE;
13373 }
13374 }
13375
13376 /* The other optimizations can be done only on the non-va_list variants. */
13377 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
13378 return NULL_TREE;
13379
13380 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
13381 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
13382 {
13383 if (!arg || !validate_arg (arg, POINTER_TYPE))
13384 return NULL_TREE;
13385 if (fn_puts)
13386 call = build_call_expr_loc (loc, fn_puts, 1, arg);
13387 }
13388
13389 /* If the format specifier was "%c", call __builtin_putchar(arg). */
13390 else if (strcmp (fmt_str, target_percent_c) == 0)
13391 {
13392 if (!arg || !validate_arg (arg, INTEGER_TYPE))
13393 return NULL_TREE;
13394 if (fn_putchar)
13395 call = build_call_expr_loc (loc, fn_putchar, 1, arg);
13396 }
13397
13398 if (!call)
13399 return NULL_TREE;
13400
13401 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
13402 }
13403
13404 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
13405 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
13406 more than 3 arguments, and ARG may be null in the 2-argument case.
13407
13408 Return NULL_TREE if no simplification was possible, otherwise return the
13409 simplified form of the call as a tree. FCODE is the BUILT_IN_*
13410 code of the function to be simplified. */
13411
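/* For example (illustrative; fp is hypothetical and the return value
   must be unused):

	fprintf (fp, "")        -> folded away
	fprintf (fp, "abc")     -> fputs ("abc", fp)
	fprintf (fp, "%s", s)   -> fputs (s, fp)
	fprintf (fp, "%c", c)   -> fputc (c, fp)  */
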
13412 static tree
13413 fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
13414 tree fmt, tree arg, bool ignore,
13415 enum built_in_function fcode)
13416 {
13417 tree fn_fputc, fn_fputs, call = NULL_TREE;
13418 const char *fmt_str = NULL;
13419
13420 /* If the return value is used, don't do the transformation. */
13421 if (! ignore)
13422 return NULL_TREE;
13423
13424 /* Verify the required arguments in the original call. */
13425 if (!validate_arg (fp, POINTER_TYPE))
13426 return NULL_TREE;
13427 if (!validate_arg (fmt, POINTER_TYPE))
13428 return NULL_TREE;
13429
13430 /* Check whether the format is a literal string constant. */
13431 fmt_str = c_getstr (fmt);
13432 if (fmt_str == NULL)
13433 return NULL_TREE;
13434
13435 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
13436 {
13437 /* If we're using an unlocked function, assume the other
13438 unlocked functions exist explicitly. */
13439 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
13440 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
13441 }
13442 else
13443 {
13444 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
13445 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
13446 }
13447
13448 if (!init_target_chars ())
13449 return NULL_TREE;
13450
13451 /* If the format doesn't contain % args or %%, use fputs. */
13452 if (strchr (fmt_str, target_percent) == NULL)
13453 {
13454 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
13455 && arg)
13456 return NULL_TREE;
13457
13458 /* If the format specifier was "", fprintf does nothing. */
13459 if (fmt_str[0] == '\0')
13460 {
13461 /* If FP has side-effects, just wait until gimplification is
13462 done. */
13463 if (TREE_SIDE_EFFECTS (fp))
13464 return NULL_TREE;
13465
13466 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
13467 }
13468
13469 /* When "string" doesn't contain %, replace all cases of
13470 fprintf (fp, string) with fputs (string, fp). The fputs
13471 builtin will take care of special cases like length == 1. */
13472 if (fn_fputs)
13473 call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
13474 }
13475
13476 /* The other optimizations can be done only on the non-va_list variants. */
13477 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
13478 return NULL_TREE;
13479
13480 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
13481 else if (strcmp (fmt_str, target_percent_s) == 0)
13482 {
13483 if (!arg || !validate_arg (arg, POINTER_TYPE))
13484 return NULL_TREE;
13485 if (fn_fputs)
13486 call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
13487 }
13488
13489 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
13490 else if (strcmp (fmt_str, target_percent_c) == 0)
13491 {
13492 if (!arg || !validate_arg (arg, INTEGER_TYPE))
13493 return NULL_TREE;
13494 if (fn_fputc)
13495 call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
13496 }
13497
13498 if (!call)
13499 return NULL_TREE;
13500 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
13501 }
13502
13503 /* Initialize format string characters in the target charset. */
13504
13505 static bool
13506 init_target_chars (void)
13507 {
13508 static bool init;
13509 if (!init)
13510 {
13511 target_newline = lang_hooks.to_target_charset ('\n');
13512 target_percent = lang_hooks.to_target_charset ('%');
13513 target_c = lang_hooks.to_target_charset ('c');
13514 target_s = lang_hooks.to_target_charset ('s');
13515 if (target_newline == 0 || target_percent == 0 || target_c == 0
13516 || target_s == 0)
13517 return false;
13518
13519 target_percent_c[0] = target_percent;
13520 target_percent_c[1] = target_c;
13521 target_percent_c[2] = '\0';
13522
13523 target_percent_s[0] = target_percent;
13524 target_percent_s[1] = target_s;
13525 target_percent_s[2] = '\0';
13526
13527 target_percent_s_newline[0] = target_percent;
13528 target_percent_s_newline[1] = target_s;
13529 target_percent_s_newline[2] = target_newline;
13530 target_percent_s_newline[3] = '\0';
13531
13532 init = true;
13533 }
13534 return true;
13535 }
13536
13537 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
13538 and no overflow/underflow occurred. INEXACT is true if M was not
13539 exactly calculated. TYPE is the tree type for the result. This
13540 function assumes that you cleared the MPFR flags, then
13541 calculated M, so that any flag set since then was raised by
13542 that calculation. Return NULL_TREE if any checks fail. */
13543
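/* The expected calling pattern, as in do_mpfr_arg1 below:

	mpfr_clear_flags ();
	inexact = mpfr_sin (m, m, rnd);      // or any other mpfr_* call
	result = do_mpfr_ckconv (m, type, inexact);  */
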
13544 static tree
13545 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
13546 {
13547 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13548 overflow/underflow occurred. If -frounding-math, proceed iff the
13549 result of calling FUNC was exact. */
13550 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
13551 && (!flag_rounding_math || !inexact))
13552 {
13553 REAL_VALUE_TYPE rr;
13554
13555 real_from_mpfr (&rr, m, type, GMP_RNDN);
13556 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
13557 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13558 but the mpfr_t is not, then we underflowed in the
13559 conversion. */
13560 if (real_isfinite (&rr)
13561 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
13562 {
13563 REAL_VALUE_TYPE rmode;
13564
13565 real_convert (&rmode, TYPE_MODE (type), &rr);
13566 /* Proceed iff the specified mode can hold the value. */
13567 if (real_identical (&rmode, &rr))
13568 return build_real (type, rmode);
13569 }
13570 }
13571 return NULL_TREE;
13572 }
13573
13574 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
13575 number and no overflow/underflow occurred. INEXACT is true if M
13576 was not exactly calculated. TYPE is the tree type for the result.
13577 This function assumes that you cleared the MPFR flags, then
13578 calculated M, so that any flag set since then was raised by
13579 that calculation. Return NULL_TREE if any checks fail; if
13580 FORCE_CONVERT is true, the checks are bypassed. */
13581
13582 static tree
13583 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
13584 {
13585 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13586 overflow/underflow occurred. If -frounding-math, proceed iff the
13587 result of calling FUNC was exact. */
13588 if (force_convert
13589 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
13590 && !mpfr_overflow_p () && !mpfr_underflow_p ()
13591 && (!flag_rounding_math || !inexact)))
13592 {
13593 REAL_VALUE_TYPE re, im;
13594
13595 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
13596 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
13597 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
13598 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13599 but the mpfr_t is not, then we underflowed in the
13600 conversion. */
13601 if (force_convert
13602 || (real_isfinite (&re) && real_isfinite (&im)
13603 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
13604 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
13605 {
13606 REAL_VALUE_TYPE re_mode, im_mode;
13607
13608 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
13609 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
13610 /* Proceed iff the specified mode can hold the value. */
13611 if (force_convert
13612 || (real_identical (&re_mode, &re)
13613 && real_identical (&im_mode, &im)))
13614 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
13615 build_real (TREE_TYPE (type), im_mode));
13616 }
13617 }
13618 return NULL_TREE;
13619 }
13620
13621 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
13622 FUNC on it and return the resulting value as a tree with type TYPE.
13623 If MIN and/or MAX are not NULL, then the supplied ARG must be
13624 within those bounds. If INCLUSIVE is true, then MIN/MAX are
13625 acceptable values, otherwise they are not. The mpfr precision is
13626 set to the precision of TYPE. We assume that function FUNC returns
13627 zero if the result could be calculated exactly within the requested
13628 precision. */
13629
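/* A sketch of how the math-builtin folders earlier in this file use it,
   e.g. to fold sin (1.0) at compile time:

	tree arg = build_real (double_type_node, dconst1);
	tree val = do_mpfr_arg1 (arg, double_type_node, mpfr_sin,
				 NULL, NULL, false);
	// val is a REAL_CST holding sin (1) correctly rounded, or
	// NULL_TREE if any of the checks below fail.  */
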
13630 static tree
13631 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
13632 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
13633 bool inclusive)
13634 {
13635 tree result = NULL_TREE;
13636
13637 STRIP_NOPS (arg);
13638
13639 /* To proceed, MPFR must exactly represent the target floating point
13640 format, which only happens when the target base equals two. */
13641 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13642 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
13643 {
13644 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13645
13646 if (real_isfinite (ra)
13647 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
13648 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
13649 {
13650 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13651 const int prec = fmt->p;
13652 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13653 int inexact;
13654 mpfr_t m;
13655
13656 mpfr_init2 (m, prec);
13657 mpfr_from_real (m, ra, GMP_RNDN);
13658 mpfr_clear_flags ();
13659 inexact = func (m, m, rnd);
13660 result = do_mpfr_ckconv (m, type, inexact);
13661 mpfr_clear (m);
13662 }
13663 }
13664
13665 return result;
13666 }
13667
13668 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
13669 FUNC on it and return the resulting value as a tree with type TYPE.
13670 The mpfr precision is set to the precision of TYPE. We assume that
13671 function FUNC returns zero if the result could be calculated
13672 exactly within the requested precision. */
13673
13674 static tree
13675 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
13676 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13677 {
13678 tree result = NULL_TREE;
13679
13680 STRIP_NOPS (arg1);
13681 STRIP_NOPS (arg2);
13682
13683 /* To proceed, MPFR must exactly represent the target floating point
13684 format, which only happens when the target base equals two. */
13685 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13686 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13687 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13688 {
13689 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13690 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13691
13692 if (real_isfinite (ra1) && real_isfinite (ra2))
13693 {
13694 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13695 const int prec = fmt->p;
13696 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13697 int inexact;
13698 mpfr_t m1, m2;
13699
13700 mpfr_inits2 (prec, m1, m2, NULL);
13701 mpfr_from_real (m1, ra1, GMP_RNDN);
13702 mpfr_from_real (m2, ra2, GMP_RNDN);
13703 mpfr_clear_flags ();
13704 inexact = func (m1, m1, m2, rnd);
13705 result = do_mpfr_ckconv (m1, type, inexact);
13706 mpfr_clears (m1, m2, NULL);
13707 }
13708 }
13709
13710 return result;
13711 }
13712
13713 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
13714 FUNC on it and return the resulting value as a tree with type TYPE.
13715 The mpfr precision is set to the precision of TYPE. We assume that
13716 function FUNC returns zero if the result could be calculated
13717 exactly within the requested precision. */
13718
13719 static tree
13720 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
13721 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13722 {
13723 tree result = NULL_TREE;
13724
13725 STRIP_NOPS (arg1);
13726 STRIP_NOPS (arg2);
13727 STRIP_NOPS (arg3);
13728
13729 /* To proceed, MPFR must exactly represent the target floating point
13730 format, which only happens when the target base equals two. */
13731 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13732 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13733 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
13734 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
13735 {
13736 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13737 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13738 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
13739
13740 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
13741 {
13742 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13743 const int prec = fmt->p;
13744 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13745 int inexact;
13746 mpfr_t m1, m2, m3;
13747
13748 mpfr_inits2 (prec, m1, m2, m3, NULL);
13749 mpfr_from_real (m1, ra1, GMP_RNDN);
13750 mpfr_from_real (m2, ra2, GMP_RNDN);
13751 mpfr_from_real (m3, ra3, GMP_RNDN);
13752 mpfr_clear_flags ();
13753 inexact = func (m1, m1, m2, m3, rnd);
13754 result = do_mpfr_ckconv (m1, type, inexact);
13755 mpfr_clears (m1, m2, m3, NULL);
13756 }
13757 }
13758
13759 return result;
13760 }
13761
13762 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
13763 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
13764 If ARG_SINP and ARG_COSP are NULL then the result is returned
13765 as a complex value.
13766 The type is taken from the type of ARG and is used for setting the
13767 precision of the calculation and results. */
13768
13769 static tree
13770 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
13771 {
13772 tree const type = TREE_TYPE (arg);
13773 tree result = NULL_TREE;
13774
13775 STRIP_NOPS (arg);
13776
13777 /* To proceed, MPFR must exactly represent the target floating point
13778 format, which only happens when the target base equals two. */
13779 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13780 && TREE_CODE (arg) == REAL_CST
13781 && !TREE_OVERFLOW (arg))
13782 {
13783 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13784
13785 if (real_isfinite (ra))
13786 {
13787 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13788 const int prec = fmt->p;
13789 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13790 tree result_s, result_c;
13791 int inexact;
13792 mpfr_t m, ms, mc;
13793
13794 mpfr_inits2 (prec, m, ms, mc, NULL);
13795 mpfr_from_real (m, ra, GMP_RNDN);
13796 mpfr_clear_flags ();
13797 inexact = mpfr_sin_cos (ms, mc, m, rnd);
13798 result_s = do_mpfr_ckconv (ms, type, inexact);
13799 result_c = do_mpfr_ckconv (mc, type, inexact);
13800 mpfr_clears (m, ms, mc, NULL);
13801 if (result_s && result_c)
13802 {
13803 /* If we are to return in a complex value do so. */
13804 if (!arg_sinp && !arg_cosp)
13805 return build_complex (build_complex_type (type),
13806 result_c, result_s);
13807
13808 /* Dereference the sin/cos pointer arguments. */
13809 arg_sinp = build_fold_indirect_ref (arg_sinp);
13810 arg_cosp = build_fold_indirect_ref (arg_cosp);
13811 /* Proceed iff valid pointer types were passed in. */
13812 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
13813 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
13814 {
13815 /* Set the values. */
13816 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
13817 result_s);
13818 TREE_SIDE_EFFECTS (result_s) = 1;
13819 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
13820 result_c);
13821 TREE_SIDE_EFFECTS (result_c) = 1;
13822 /* Combine the assignments into a compound expr. */
13823 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13824 result_s, result_c));
13825 }
13826 }
13827 }
13828 }
13829 return result;
13830 }
13831
13832 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13833 two-argument mpfr order N Bessel function FUNC on them and return
13834 the resulting value as a tree with type TYPE. The mpfr precision
13835 is set to the precision of TYPE. We assume that function FUNC
13836 returns zero if the result could be calculated exactly within the
13837 requested precision. */
13838 static tree
13839 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
13840 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
13841 const REAL_VALUE_TYPE *min, bool inclusive)
13842 {
13843 tree result = NULL_TREE;
13844
13845 STRIP_NOPS (arg1);
13846 STRIP_NOPS (arg2);
13847
13848 /* To proceed, MPFR must exactly represent the target floating point
13849 format, which only happens when the target base equals two. */
13850 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13851 && tree_fits_shwi_p (arg1)
13852 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13853 {
13854 const HOST_WIDE_INT n = tree_to_shwi (arg1);
13855 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
13856
13857 if (n == (long)n
13858 && real_isfinite (ra)
13859 && (!min || real_compare (inclusive ? GE_EXPR : GT_EXPR, ra, min)))
13860 {
13861 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13862 const int prec = fmt->p;
13863 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13864 int inexact;
13865 mpfr_t m;
13866
13867 mpfr_init2 (m, prec);
13868 mpfr_from_real (m, ra, GMP_RNDN);
13869 mpfr_clear_flags ();
13870 inexact = func (m, n, m, rnd);
13871 result = do_mpfr_ckconv (m, type, inexact);
13872 mpfr_clear (m);
13873 }
13874 }
13875
13876 return result;
13877 }
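
/* For example, a call such as jn (2, 1.5) arrives here with
   ARG1 == 2, ARG2 == 1.5 and FUNC == mpfr_jn, and folds to the value
   of the order-2 Bessel function of the first kind at 1.5, computed
   at double precision.  For yn, the MIN/INCLUSIVE arguments reject
   non-positive arguments, where the function is not defined.  */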
13878
13879 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13880 the pointer *(ARG_QUO) and return the result. The type is taken
13881 from the type of ARG0 and is used for setting the precision of the
13882 calculation and results. */
13883
13884 static tree
13885 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
13886 {
13887 tree const type = TREE_TYPE (arg0);
13888 tree result = NULL_TREE;
13889
13890 STRIP_NOPS (arg0);
13891 STRIP_NOPS (arg1);
13892
13893 /* To proceed, MPFR must exactly represent the target floating point
13894 format, which only happens when the target base equals two. */
13895 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13896 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
13897 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
13898 {
13899 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
13900 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
13901
13902 if (real_isfinite (ra0) && real_isfinite (ra1))
13903 {
13904 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13905 const int prec = fmt->p;
13906 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13907 tree result_rem;
13908 long integer_quo;
13909 mpfr_t m0, m1;
13910
13911 mpfr_inits2 (prec, m0, m1, NULL);
13912 mpfr_from_real (m0, ra0, GMP_RNDN);
13913 mpfr_from_real (m1, ra1, GMP_RNDN);
13914 mpfr_clear_flags ();
13915 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
13916 /* Remquo is independent of the rounding mode, so pass
13917 inexact=0 to do_mpfr_ckconv(). */
13918 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
13919 mpfr_clears (m0, m1, NULL);
13920 if (result_rem)
13921 {
13922 /* MPFR calculates quo in the host's long, so it may
13923 return more bits in quo than the target int can hold
13924 if sizeof(host long) > sizeof(target int). This can
13925 happen even for native compilers in LP64 mode. In
13926 these cases, reduce the quo value modulo the largest
13927 number that the target int can hold, leaving one
13928 bit for the sign. */
13929 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
13930 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
13931
13932 /* Dereference the quo pointer argument. */
13933 arg_quo = build_fold_indirect_ref (arg_quo);
13934 /* Proceed iff a valid pointer type was passed in. */
13935 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
13936 {
13937 /* Set the value. */
13938 tree result_quo
13939 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
13940 build_int_cst (TREE_TYPE (arg_quo),
13941 integer_quo));
13942 TREE_SIDE_EFFECTS (result_quo) = 1;
13943 /* Combine the quo assignment with the rem. */
13944 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13945 result_quo, result_rem));
13946 }
13947 }
13948 }
13949 }
13950 return result;
13951 }
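
/* For example, with constant arguments remquo (5.0, 3.0, &q) folds
   to the equivalent of (q = 2, -1.0): the quotient 5.0/3.0 rounded
   to nearest is 2, so the IEEE remainder is 5.0 - 2*3.0 == -1.0,
   and the quotient 2 is assigned through the int pointer.  */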
13952
13953 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13954 resulting value as a tree with type TYPE. The mpfr precision is
13955 set to the precision of TYPE. We assume that this mpfr function
13956 returns zero if the result could be calculated exactly within the
13957 requested precision. In addition, the integer pointer represented
13958 by ARG_SG will be dereferenced and set to the appropriate signgam
13959 (-1,1) value. */
13960
13961 static tree
13962 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
13963 {
13964 tree result = NULL_TREE;
13965
13966 STRIP_NOPS (arg);
13967
13968 /* To proceed, MPFR must exactly represent the target floating point
13969 format, which only happens when the target base equals two. Also
13970 verify ARG is a constant and that ARG_SG is an int pointer. */
13971 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13972 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
13973 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
13974 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
13975 {
13976 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
13977
13978 /* In addition to NaN and Inf, the argument cannot be zero or a
13979 negative integer. */
13980 if (real_isfinite (ra)
13981 && ra->cl != rvc_zero
13982 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
13983 {
13984 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13985 const int prec = fmt->p;
13986 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13987 int inexact, sg;
13988 mpfr_t m;
13989 tree result_lg;
13990
13991 mpfr_init2 (m, prec);
13992 mpfr_from_real (m, ra, GMP_RNDN);
13993 mpfr_clear_flags ();
13994 inexact = mpfr_lgamma (m, &sg, m, rnd);
13995 result_lg = do_mpfr_ckconv (m, type, inexact);
13996 mpfr_clear (m);
13997 if (result_lg)
13998 {
13999 tree result_sg;
14000
14001 /* Dereference the arg_sg pointer argument. */
14002 arg_sg = build_fold_indirect_ref (arg_sg);
14003 /* Assign the signgam value into *arg_sg. */
14004 result_sg = fold_build2 (MODIFY_EXPR,
14005 TREE_TYPE (arg_sg), arg_sg,
14006 build_int_cst (TREE_TYPE (arg_sg), sg));
14007 TREE_SIDE_EFFECTS (result_sg) = 1;
14008 /* Combine the signgam assignment with the lgamma result. */
14009 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
14010 result_sg, result_lg));
14011 }
14012 }
14013 }
14014
14015 return result;
14016 }
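
/* For example, lgamma_r (2.0, &sg) folds (assuming do_mpfr_ckconv
   accepts the result) to the equivalent of (sg = 1, 0.0), since
   gamma(2) == 1, so log|gamma(2)| == 0 and its sign is positive.  */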
14017
14018 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
14019 function FUNC on it and return the resulting value as a tree with
14020 type TYPE. The mpfr precision is set to the precision of TYPE. We
14021 assume that function FUNC returns zero if the result could be
14022 calculated exactly within the requested precision. */
14023
14024 static tree
14025 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
14026 {
14027 tree result = NULL_TREE;
14028
14029 STRIP_NOPS (arg);
14030
14031 /* To proceed, MPFR must exactly represent the target floating point
14032 format, which only happens when the target base equals two. */
14033 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
14034 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
14035 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
14036 {
14037 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
14038 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
14039
14040 if (real_isfinite (re) && real_isfinite (im))
14041 {
14042 const struct real_format *const fmt =
14043 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
14044 const int prec = fmt->p;
14045 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
14046 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
14047 int inexact;
14048 mpc_t m;
14049
14050 mpc_init2 (m, prec);
14051 mpfr_from_real (mpc_realref (m), re, rnd);
14052 mpfr_from_real (mpc_imagref (m), im, rnd);
14053 mpfr_clear_flags ();
14054 inexact = func (m, m, crnd);
14055 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
14056 mpc_clear (m);
14057 }
14058 }
14059
14060 return result;
14061 }
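
/* For example, a call such as csin (1.0 + 0.0i) arrives here with
   FUNC == mpc_sin and folds to the complex constant
   0.8414709848... + 0.0i, computed by MPC at the precision of the
   component type and converted back by do_mpc_ckconv.  */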
14062
14063 /* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
14064 mpc function FUNC on them and return the resulting value as a tree
14065 with type TYPE. The mpfr precision is set to the precision of
14066 TYPE. We assume that function FUNC returns zero if the result
14067 could be calculated exactly within the requested precision. If
14068 DO_NONFINITE is true, then fold expressions containing Inf or NaN
14069 in the arguments and/or results. */
14070
14071 tree
14072 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
14073 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
14074 {
14075 tree result = NULL_TREE;
14076
14077 STRIP_NOPS (arg0);
14078 STRIP_NOPS (arg1);
14079
14080 /* To proceed, MPFR must exactly represent the target floating point
14081 format, which only happens when the target base equals two. */
14082 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
14083 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
14084 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
14085 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
14086 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
14087 {
14088 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
14089 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
14090 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
14091 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
14092
14093 if (do_nonfinite
14094 || (real_isfinite (re0) && real_isfinite (im0)
14095 && real_isfinite (re1) && real_isfinite (im1)))
14096 {
14097 const struct real_format *const fmt =
14098 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
14099 const int prec = fmt->p;
14100 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
14101 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
14102 int inexact;
14103 mpc_t m0, m1;
14104
14105 mpc_init2 (m0, prec);
14106 mpc_init2 (m1, prec);
14107 mpfr_from_real (mpc_realref (m0), re0, rnd);
14108 mpfr_from_real (mpc_imagref (m0), im0, rnd);
14109 mpfr_from_real (mpc_realref (m1), re1, rnd);
14110 mpfr_from_real (mpc_imagref (m1), im1, rnd);
14111 mpfr_clear_flags ();
14112 inexact = func (m0, m0, m1, crnd);
14113 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
14114 mpc_clear (m0);
14115 mpc_clear (m1);
14116 }
14117 }
14118
14119 return result;
14120 }
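
/* For example, cpow (2.0 + 0.0i, 3.0 + 0.0i) arrives here with
   FUNC == mpc_pow and DO_NONFINITE false, and folds to 8.0 + 0.0i.
   DO_NONFINITE is passed as true by callers such as the complex
   multiplication/division folding of static initializers, where
   folding through Inf and NaN operands is wanted as well.  */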
14121
14122 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
14123 a normal call should be emitted rather than expanding the function
14124 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
14125
14126 static tree
14127 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
14128 {
14129 int nargs = gimple_call_num_args (stmt);
14130
14131 return fold_builtin_sprintf_chk_1 (gimple_location (stmt), nargs,
14132 (nargs > 0
14133 ? gimple_call_arg_ptr (stmt, 0)
14134 : &error_mark_node), fcode);
14135 }
14136
14137 /* Fold a call STMT to __{,v}snprintf_chk. Return NULL_TREE if
14138 a normal call should be emitted rather than expanding the function
14139 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
14140 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is the maximum
14141 length passed as the second argument. */
14142
14143 tree
14144 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
14145 enum built_in_function fcode)
14146 {
14147 int nargs = gimple_call_num_args (stmt);
14148
14149 return fold_builtin_snprintf_chk_1 (gimple_location (stmt), nargs,
14150 (nargs > 0
14151 ? gimple_call_arg_ptr (stmt, 0)
14152 : &error_mark_node), maxlen, fcode);
14153 }
14154
14155 /* Builtins with folding operations that operate on "..." arguments
14156 need special handling; we need to store the arguments in a convenient
14157 data structure before attempting any folding. Fortunately there are
14158 only a few builtins that fall into this category. FNDECL is the
14159 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
14160 result of the function call is ignored. */
14161
14162 static tree
14163 gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
14164 bool ignore ATTRIBUTE_UNUSED)
14165 {
14166 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
14167 tree ret = NULL_TREE;
14168
14169 switch (fcode)
14170 {
14171 case BUILT_IN_SPRINTF_CHK:
14172 case BUILT_IN_VSPRINTF_CHK:
14173 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
14174 break;
14175
14176 case BUILT_IN_SNPRINTF_CHK:
14177 case BUILT_IN_VSNPRINTF_CHK:
14178 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
14179 break;
14180 default:
14181 break;
14182 }
14183 if (ret)
14184 {
14185 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
14186 TREE_NO_WARNING (ret) = 1;
14187 return ret;
14188 }
14189 return NULL_TREE;
14190 }
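
/* For example, a __builtin___sprintf_chk call whose destination is
   known to be large enough can fold to the unchecked sprintf (and
   from there sometimes down to a simple string copy).  The value of
   the substituted expression may well go unused, so the NOP_EXPR
   wrapper above, with TREE_NO_WARNING set, keeps the replacement
   from triggering "statement without effect"-style warnings.  */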
14191
14192 /* A wrapper function for builtin folding that prevents warnings for
14193 "statement without effect" and the like, caused by removing the
14194 call node before the warning is generated. */
14195
14196 tree
14197 fold_call_stmt (gimple stmt, bool ignore)
14198 {
14199 tree ret = NULL_TREE;
14200 tree fndecl = gimple_call_fndecl (stmt);
14201 location_t loc = gimple_location (stmt);
14202 if (fndecl
14203 && TREE_CODE (fndecl) == FUNCTION_DECL
14204 && DECL_BUILT_IN (fndecl)
14205 && !gimple_call_va_arg_pack_p (stmt))
14206 {
14207 int nargs = gimple_call_num_args (stmt);
14208 tree *args = (nargs > 0
14209 ? gimple_call_arg_ptr (stmt, 0)
14210 : &error_mark_node);
14211
14212 if (avoid_folding_inline_builtin (fndecl))
14213 return NULL_TREE;
14214 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
14215 {
14216 return targetm.fold_builtin (fndecl, nargs, args, ignore);
14217 }
14218 else
14219 {
14220 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
14221 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
14222 if (!ret)
14223 ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
14224 if (ret)
14225 {
14226 /* Propagate location information from original call to
14227 expansion of builtin. Otherwise things like
14228 maybe_emit_chk_warning, that operate on the expansion
14229 of a builtin, will use the wrong location information. */
14230 if (gimple_has_location (stmt))
14231 {
14232 tree realret = ret;
14233 if (TREE_CODE (ret) == NOP_EXPR)
14234 realret = TREE_OPERAND (ret, 0);
14235 if (CAN_HAVE_LOCATION_P (realret)
14236 && !EXPR_HAS_LOCATION (realret))
14237 SET_EXPR_LOCATION (realret, loc);
14238 return realret;
14239 }
14240 return ret;
14241 }
14242 }
14243 }
14244 return NULL_TREE;
14245 }
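
/* For example, folding the statement  t = __builtin_pow (x, 2.0)
   yields the expression  x * x;  the code above then copies the
   statement's location onto that expression, so that any later
   diagnostics about it point at the original call.  */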
14246
14247 /* Look up the function in builtin_decl that corresponds to DECL
14248 and set ASMSPEC as its user assembler name. DECL must be a
14249 function decl that declares a builtin. */
14250
14251 void
14252 set_builtin_user_assembler_name (tree decl, const char *asmspec)
14253 {
14254 tree builtin;
14255 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
14256 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
14257 && asmspec != 0);
14258
14259 builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
14260 set_user_assembler_name (builtin, asmspec);
14261 switch (DECL_FUNCTION_CODE (decl))
14262 {
14263 case BUILT_IN_MEMCPY:
14264 init_block_move_fn (asmspec);
14265 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
14266 break;
14267 case BUILT_IN_MEMSET:
14268 init_block_clear_fn (asmspec);
14269 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
14270 break;
14271 case BUILT_IN_MEMMOVE:
14272 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
14273 break;
14274 case BUILT_IN_MEMCMP:
14275 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
14276 break;
14277 case BUILT_IN_ABORT:
14278 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
14279 break;
14280 case BUILT_IN_FFS:
14281 if (INT_TYPE_SIZE < BITS_PER_WORD)
14282 {
14283 set_user_assembler_libfunc ("ffs", asmspec);
14284 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
14285 MODE_INT, 0), "ffs");
14286 }
14287 break;
14288 default:
14289 break;
14290 }
14291 }
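
/* For example, a freestanding program that renames memcpy with

     void *memcpy (void *, const void *, __SIZE_TYPE__)
       __asm__ ("my_memcpy");

   (my_memcpy being an arbitrary user-chosen name) ends up here: both
   the block-move expanders and the memcpy libfunc are redirected, so
   compiler-generated copies call my_memcpy as well.  */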
14292
14293 /* Return true if DECL is a builtin that expands to a constant or similarly
14294 simple code. */
14295 bool
14296 is_simple_builtin (tree decl)
14297 {
14298 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
14299 switch (DECL_FUNCTION_CODE (decl))
14300 {
14301 /* Builtins that expand to constants. */
14302 case BUILT_IN_CONSTANT_P:
14303 case BUILT_IN_EXPECT:
14304 case BUILT_IN_OBJECT_SIZE:
14305 case BUILT_IN_UNREACHABLE:
14306 /* Simple register moves or loads from stack. */
14307 case BUILT_IN_ASSUME_ALIGNED:
14308 case BUILT_IN_RETURN_ADDRESS:
14309 case BUILT_IN_EXTRACT_RETURN_ADDR:
14310 case BUILT_IN_FROB_RETURN_ADDR:
14311 case BUILT_IN_RETURN:
14312 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
14313 case BUILT_IN_FRAME_ADDRESS:
14314 case BUILT_IN_VA_END:
14315 case BUILT_IN_STACK_SAVE:
14316 case BUILT_IN_STACK_RESTORE:
14317 /* Exception state returns or moves registers around. */
14318 case BUILT_IN_EH_FILTER:
14319 case BUILT_IN_EH_POINTER:
14320 case BUILT_IN_EH_COPY_VALUES:
14321 return true;
14322
14323 default:
14324 return false;
14325 }
14326
14327 return false;
14328 }
14329
14330 /* Return true if DECL is a builtin that is not expensive, i.e., one
14331 that will most probably be expanded inline into reasonably simple
14332 code. This is a superset of is_simple_builtin. */
14333 bool
14334 is_inexpensive_builtin (tree decl)
14335 {
14336 if (!decl)
14337 return false;
14338 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
14339 return true;
14340 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
14341 switch (DECL_FUNCTION_CODE (decl))
14342 {
14343 case BUILT_IN_ABS:
14344 case BUILT_IN_ALLOCA:
14345 case BUILT_IN_ALLOCA_WITH_ALIGN:
14346 case BUILT_IN_BSWAP16:
14347 case BUILT_IN_BSWAP32:
14348 case BUILT_IN_BSWAP64:
14349 case BUILT_IN_CLZ:
14350 case BUILT_IN_CLZIMAX:
14351 case BUILT_IN_CLZL:
14352 case BUILT_IN_CLZLL:
14353 case BUILT_IN_CTZ:
14354 case BUILT_IN_CTZIMAX:
14355 case BUILT_IN_CTZL:
14356 case BUILT_IN_CTZLL:
14357 case BUILT_IN_FFS:
14358 case BUILT_IN_FFSIMAX:
14359 case BUILT_IN_FFSL:
14360 case BUILT_IN_FFSLL:
14361 case BUILT_IN_IMAXABS:
14362 case BUILT_IN_FINITE:
14363 case BUILT_IN_FINITEF:
14364 case BUILT_IN_FINITEL:
14365 case BUILT_IN_FINITED32:
14366 case BUILT_IN_FINITED64:
14367 case BUILT_IN_FINITED128:
14368 case BUILT_IN_FPCLASSIFY:
14369 case BUILT_IN_ISFINITE:
14370 case BUILT_IN_ISINF_SIGN:
14371 case BUILT_IN_ISINF:
14372 case BUILT_IN_ISINFF:
14373 case BUILT_IN_ISINFL:
14374 case BUILT_IN_ISINFD32:
14375 case BUILT_IN_ISINFD64:
14376 case BUILT_IN_ISINFD128:
14377 case BUILT_IN_ISNAN:
14378 case BUILT_IN_ISNANF:
14379 case BUILT_IN_ISNANL:
14380 case BUILT_IN_ISNAND32:
14381 case BUILT_IN_ISNAND64:
14382 case BUILT_IN_ISNAND128:
14383 case BUILT_IN_ISNORMAL:
14384 case BUILT_IN_ISGREATER:
14385 case BUILT_IN_ISGREATEREQUAL:
14386 case BUILT_IN_ISLESS:
14387 case BUILT_IN_ISLESSEQUAL:
14388 case BUILT_IN_ISLESSGREATER:
14389 case BUILT_IN_ISUNORDERED:
14390 case BUILT_IN_VA_ARG_PACK:
14391 case BUILT_IN_VA_ARG_PACK_LEN:
14392 case BUILT_IN_VA_COPY:
14393 case BUILT_IN_TRAP:
14394 case BUILT_IN_SAVEREGS:
14395 case BUILT_IN_POPCOUNTL:
14396 case BUILT_IN_POPCOUNTLL:
14397 case BUILT_IN_POPCOUNTIMAX:
14398 case BUILT_IN_POPCOUNT:
14399 case BUILT_IN_PARITYL:
14400 case BUILT_IN_PARITYLL:
14401 case BUILT_IN_PARITYIMAX:
14402 case BUILT_IN_PARITY:
14403 case BUILT_IN_LABS:
14404 case BUILT_IN_LLABS:
14405 case BUILT_IN_PREFETCH:
14406 return true;
14407
14408 default:
14409 return is_simple_builtin (decl);
14410 }
14411
14412 return false;
14413 }
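
/* A typical use of the two predicates above (a sketch; the actual
   weights live with each caller, e.g. the inliner's size estimate):

     if (is_inexpensive_builtin (decl))
       cost = 1;

   so that calls to such builtins are costed like a single simple
   statement rather than like a full call.  */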