/* Expand builtin functions.
   Copyright (C) 1988-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "machmode.h"
#include "rtl.h"
#include "tree.h"
#include "stringpool.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "tree-object-size.h"
#include "realmpfr.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"
#include "predict.h"
#include "tm_p.h"
#include "target.h"
#include "langhooks.h"
#include "tree-ssanames.h"
#include "tree-dfa.h"
#include "value-prof.h"
#include "diagnostic-core.h"
#include "builtins.h"
#include "ubsan.h"
#include "cilk.h"


static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));

struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN

/* Set up an array of _DECL trees; make sure each element is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info;

/* True if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;
88
89 static const char *c_getstr (tree);
90 static rtx c_readstr (const char *, enum machine_mode);
91 static int target_char_cast (tree, char *);
92 static rtx get_memory_rtx (tree, tree);
93 static int apply_args_size (void);
94 static int apply_result_size (void);
95 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
96 static rtx result_vector (int, rtx);
97 #endif
98 static void expand_builtin_update_setjmp_buf (rtx);
99 static void expand_builtin_prefetch (tree);
100 static rtx expand_builtin_apply_args (void);
101 static rtx expand_builtin_apply_args_1 (void);
102 static rtx expand_builtin_apply (rtx, rtx, rtx);
103 static void expand_builtin_return (rtx);
104 static enum type_class type_to_class (tree);
105 static rtx expand_builtin_classify_type (tree);
106 static void expand_errno_check (tree, rtx);
107 static rtx expand_builtin_mathfn (tree, rtx, rtx);
108 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
109 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
110 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
111 static rtx expand_builtin_interclass_mathfn (tree, rtx);
112 static rtx expand_builtin_sincos (tree);
113 static rtx expand_builtin_cexpi (tree, rtx);
114 static rtx expand_builtin_int_roundingfn (tree, rtx);
115 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
116 static rtx expand_builtin_next_arg (void);
117 static rtx expand_builtin_va_start (tree);
118 static rtx expand_builtin_va_end (tree);
119 static rtx expand_builtin_va_copy (tree);
120 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
121 static rtx expand_builtin_strcmp (tree, rtx);
122 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
123 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
124 static rtx expand_builtin_memcpy (tree, rtx);
125 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
126 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
127 enum machine_mode, int);
128 static rtx expand_builtin_strcpy (tree, rtx);
129 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
130 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
131 static rtx expand_builtin_strncpy (tree, rtx);
132 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
133 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
134 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
135 static rtx expand_builtin_bzero (tree);
136 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
137 static rtx expand_builtin_alloca (tree, bool);
138 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
139 static rtx expand_builtin_frame_address (tree, tree);
140 static tree stabilize_va_list_loc (location_t, tree, int);
141 static rtx expand_builtin_expect (tree, rtx);
142 static tree fold_builtin_constant_p (tree);
143 static tree fold_builtin_expect (location_t, tree, tree);
144 static tree fold_builtin_classify_type (tree);
145 static tree fold_builtin_strlen (location_t, tree, tree);
146 static tree fold_builtin_inf (location_t, tree, int);
147 static tree fold_builtin_nan (tree, tree, int);
148 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
149 static bool validate_arg (const_tree, enum tree_code code);
150 static bool integer_valued_real_p (tree);
151 static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
152 static bool readonly_data_expr (tree);
153 static rtx expand_builtin_fabs (tree, rtx, rtx);
154 static rtx expand_builtin_signbit (tree, rtx);
155 static tree fold_builtin_sqrt (location_t, tree, tree);
156 static tree fold_builtin_cbrt (location_t, tree, tree);
157 static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
158 static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
159 static tree fold_builtin_cos (location_t, tree, tree, tree);
160 static tree fold_builtin_cosh (location_t, tree, tree, tree);
161 static tree fold_builtin_tan (tree, tree);
162 static tree fold_builtin_trunc (location_t, tree, tree);
163 static tree fold_builtin_floor (location_t, tree, tree);
164 static tree fold_builtin_ceil (location_t, tree, tree);
165 static tree fold_builtin_round (location_t, tree, tree);
166 static tree fold_builtin_int_roundingfn (location_t, tree, tree);
167 static tree fold_builtin_bitop (tree, tree);
168 static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree, bool, int);
169 static tree fold_builtin_strchr (location_t, tree, tree, tree);
170 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
171 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
172 static tree fold_builtin_strcmp (location_t, tree, tree);
173 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
174 static tree fold_builtin_signbit (location_t, tree, tree);
175 static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
176 static tree fold_builtin_isascii (location_t, tree);
177 static tree fold_builtin_toascii (location_t, tree);
178 static tree fold_builtin_isdigit (location_t, tree);
179 static tree fold_builtin_fabs (location_t, tree, tree);
180 static tree fold_builtin_abs (location_t, tree, tree);
181 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
182 enum tree_code);
183 static tree fold_builtin_n (location_t, tree, tree *, int, bool);
184 static tree fold_builtin_0 (location_t, tree, bool);
185 static tree fold_builtin_1 (location_t, tree, tree, bool);
186 static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
187 static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
188 static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
189 static tree fold_builtin_varargs (location_t, tree, tree, bool);
190
191 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
192 static tree fold_builtin_strstr (location_t, tree, tree, tree);
193 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
194 static tree fold_builtin_strncat (location_t, tree, tree, tree);
195 static tree fold_builtin_strspn (location_t, tree, tree);
196 static tree fold_builtin_strcspn (location_t, tree, tree);
197 static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
198 static tree fold_builtin_snprintf (location_t, tree, tree, tree, tree, int);
199
200 static rtx expand_builtin_object_size (tree);
201 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
202 enum built_in_function);
203 static void maybe_emit_chk_warning (tree, enum built_in_function);
204 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
205 static void maybe_emit_free_warning (tree);
206 static tree fold_builtin_object_size (tree, tree);
207 static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
208 static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
209 static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
210 static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
211 static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
212 enum built_in_function);
213 static bool init_target_chars (void);
214
215 static unsigned HOST_WIDE_INT target_newline;
216 static unsigned HOST_WIDE_INT target_percent;
217 static unsigned HOST_WIDE_INT target_c;
218 static unsigned HOST_WIDE_INT target_s;
219 static char target_percent_c[3];
220 static char target_percent_s[3];
221 static char target_percent_s_newline[4];
222 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
223 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
224 static tree do_mpfr_arg2 (tree, tree, tree,
225 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
226 static tree do_mpfr_arg3 (tree, tree, tree, tree,
227 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
228 static tree do_mpfr_sincos (tree, tree, tree);
229 static tree do_mpfr_bessel_n (tree, tree, tree,
230 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
231 const REAL_VALUE_TYPE *, bool);
232 static tree do_mpfr_remquo (tree, tree, tree);
233 static tree do_mpfr_lgamma_r (tree, tree, tree);
234 static void expand_builtin_sync_synchronize (void);
235
/* Return true if NAME starts with __builtin_, __sync_ or __atomic_,
   or (with -fcilkplus) names a Cilk Plus runtime helper.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  if (flag_cilkplus
      && (!strcmp (name, "__cilkrts_detach")
          || !strcmp (name, "__cilkrts_pop_frame")))
    return true;
  return false;
}
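
/* Illustration (the last case assumes -fcilkplus):
     is_builtin_name ("__builtin_memcpy")      -> true
     is_builtin_name ("__sync_fetch_and_add")  -> true
     is_builtin_name ("__atomic_load_n")       -> true
     is_builtin_name ("__cilkrts_detach")      -> true
     is_builtin_name ("memcpy")                -> false  */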


/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}

/* By default we assume that C99 functions are available in the runtime,
   but sincos is not.  */
bool
default_libc_has_function (enum function_class fn_class)
{
  if (fn_class == function_c94
      || fn_class == function_c99_misc
      || fn_class == function_c99_math_complex)
    return true;

  return false;
}

bool
gnu_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED)
{
  return true;
}

bool
no_c99_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED)
{
  return false;
}
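
/* A target picks one of the implementations above through the
   TARGET_LIBC_HAS_FUNCTION hook; e.g. a GNU libc target can use
     #define TARGET_LIBC_HAS_FUNCTION gnu_libc_has_function
   (a sketch; the exact definition lives in the target's headers).  */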

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

static bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}

/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and
   N in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT in *ALIGNP and any bit-offset in *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
                        unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  enum machine_mode mode;
  int unsignedp, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                             &mode, &unsignedp, &volatilep, true);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
         alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
         allows the low bit to be used as a virtual bit, we know
         that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
        align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
        align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
    {
      align = TYPE_ALIGN (TREE_TYPE (exp));
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
           || TREE_CODE (exp) == MEM_REF
           || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;

      if (TREE_CODE (addr) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
        {
          align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
                   & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
          align *= BITS_PER_UNIT;
          addr = TREE_OPERAND (addr, 0);
        }

      known_alignment
        = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* The alignment of the pointer operand in a TARGET_MEM_REF
         has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
        {
          if (TMR_INDEX (exp))
            {
              unsigned HOST_WIDE_INT step = 1;
              if (TMR_STEP (exp))
                step = TREE_INT_CST_LOW (TMR_STEP (exp));
              align = MIN (align, (step & -step) * BITS_PER_UNIT);
            }
          if (TMR_INDEX2 (exp))
            align = BITS_PER_UNIT;
          known_alignment = false;
        }

      /* When EXP is an actual memory reference then we can use
         TYPE_ALIGN of a pointer indirection to derive alignment.
         Do so only if get_pointer_alignment_1 did not reveal absolute
         alignment knowledge and if using that alignment would
         improve the situation.  */
      if (!addr_p && !known_alignment
          && TYPE_ALIGN (TREE_TYPE (exp)) > align)
        align = TYPE_ALIGN (TREE_TYPE (exp));
      else
        {
          /* Else adjust bitpos accordingly.  */
          bitpos += ptr_bitpos;
          if (TREE_CODE (exp) == MEM_REF
              || TREE_CODE (exp) == TARGET_MEM_REF)
            bitpos += mem_ref_offset (exp).low * BITS_PER_UNIT;
        }
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
         wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
        align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
        {
          unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
          if (inner)
            align = MIN (align, inner);
        }
    }

  *alignp = align;
  *bitposp = bitpos & (*alignp - 1);
  return known_alignment;
}

/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Otherwise return false and store BITS_PER_UNIT in *ALIGNP and any
   bit-offset in *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
                        unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}

/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);
  return align;
}
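
/* Worked example: if get_object_alignment_1 reports align == 128 and
   bitpos == 32, the address is 4 bytes past a 16-byte boundary, so the
   largest power of two known to divide it is bitpos & -bitpos == 32
   bits, i.e. the object is only guaranteed 4-byte aligned.  */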

/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Return false if the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
                         unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
                                   alignp, bitposp, true);
  else if (TREE_CODE (exp) == SSA_NAME
           && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
        {
          *bitposp = ptr_misalign * BITS_PER_UNIT;
          *alignp = ptr_align * BITS_PER_UNIT;
          /* We cannot really tell whether this result is an approximation.  */
          return true;
        }
      else
        {
          *bitposp = 0;
          *alignp = BITS_PER_UNIT;
          return false;
        }
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
                  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}
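
/* E.g. for the constant pointer (char *) 6, *ALIGNP becomes
   BIGGEST_ALIGNMENT and *BITPOSP becomes (6 * BITS_PER_UNIT)
   & (BIGGEST_ALIGNMENT - 1): the address is known exactly modulo
   BIGGEST_ALIGNMENT.  */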

/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);

  return align;
}

/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
        return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  loc = EXPR_LOC_OR_LOC (src, input_location);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
         compute the offset to the following null if we don't know where to
         start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
        if (ptr[i] == 0)
          return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
         has no internal zero bytes.  We can assume that the offset falls
         within the bounds of the string; otherwise, the programmer deserves
         what he gets.  Subtract the offset from the length of the string,
         and return that.  This would perhaps not be valid if we were dealing
         with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! tree_fits_shwi_p (offset_node))
    offset = -1;
  else
    offset = tree_to_shwi (offset_node);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (! TREE_NO_WARNING (src))
        {
          warning_at (loc, 0, "offset outside bounds of constant string");
          TREE_NO_WARNING (src) = 1;
        }
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
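
/* Illustration: for &"foobar"[2] the string constant is found with
   offset 2 and the result is ssize_int (4); for &"foo\0bar"[i] with a
   non-constant I, NULL_TREE is returned because of the embedded zero
   byte.  */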

/* Return a char pointer for a C string if it is a string constant
   or sum of string constant and integer constant.  */

static const char *
c_getstr (tree src)
{
  tree offset_node;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;

  if (offset_node == 0)
    return TREE_STRING_POINTER (src);
  else if (!tree_fits_uhwi_p (offset_node)
           || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
    return 0;

  return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
}

/* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, enum machine_mode mode)
{
  HOST_WIDE_INT c[2];
  HOST_WIDE_INT ch;
  unsigned int i, j;

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);

  c[0] = 0;
  c[1] = 0;
  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
        j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
          && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
        j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;
      gcc_assert (j < HOST_BITS_PER_DOUBLE_INT);

      if (ch)
        ch = (unsigned char) str[i];
      c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }
  return immed_double_const (c[0], c[1], mode);
}
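
/* Example: on a little-endian target c_readstr ("ab", HImode) produces
   the constant 0x6261 ('a' in the low byte), while a big-endian target
   gets 0x6162.  Once the string's terminating zero has been read, CH
   stays zero and the remaining bytes are filled with zeros.  */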

/* Cast a target constant CST to the target's CHAR type and, if that value
   fits into the host char type, return zero and put the value into the
   variable pointed to by P.  Otherwise return 1.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  val = TREE_INT_CST_LOW (cst);
  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
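
/* E.g. the target constant 65 stores 'A' in *P and returns 0.  The
   function fails (returns 1) only when the target character does not
   fit in a host char, e.g. the value 0x1ff on a target whose
   CHAR_TYPE_SIZE is 16 while the host char has 8 bits.  */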

/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
          && (TREE_CODE (exp) == PARM_DECL
              || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}

/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;

#ifdef INITIAL_FRAME_ADDRESS_RTX
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
#else
  rtx tem;

  /* For a zero count with __builtin_return_address, we don't care what
     frame address we return, because target-specific definitions will
     override us.  Therefore frame pointer elimination is OK, and using
     the soft frame pointer is OK.

     For a nonzero count, or a zero count with __builtin_frame_address,
     we require a stable offset from the current frame pointer to the
     previous one, so we must use the hard frame pointer, and
     we must disable frame pointer elimination.  */
  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    tem = frame_pointer_rtx;
  else
    {
      tem = hard_frame_pointer_rtx;

      /* Tell reload not to eliminate the frame pointer.  */
      crtl->accesses_prior_frames = 1;
    }
#endif

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the SPARC, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
         frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
#ifdef FRAME_ADDR_RTX
    return FRAME_ADDR_RTX (tem);
#else
    return tem;
#endif

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
                        plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
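
/* So __builtin_frame_address (2), for instance, walks the dynamic chain
   twice (two loads through the saved-frame-pointer word), whereas
   __builtin_return_address (0) needs no loads at all beyond whatever
   RETURN_ADDR_RTX prescribes for the current frame.  */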

/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
                                           GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
                  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
                            plus_constant (Pmode, buf_addr,
                                           2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
#ifdef HAVE_builtin_setjmp_setup
  if (HAVE_builtin_setjmp_setup)
    emit_insn (gen_builtin_setjmp_setup (buf_addr));
#endif

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}
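
/* The buffer laid out above therefore holds, in order:
     word 0: the frame pointer value (targetm.builtin_setjmp_frame_value),
     word 1: the address of RECEIVER_LABEL,
     words 2 and up: the machine-dependent stack save area.  */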

/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    /* First adjust our frame pointer to its actual value.  It was
       previously set to the start of the virtual area corresponding to
       the stacked variables when we branched here and now needs to be
       adjusted to the actual hardware fp value.

       Assignments to virtual registers are converted by
       instantiate_virtual_regs into the corresponding assignment
       to the underlying register (fp in this case) that makes
       the original assignment true.
       So the following insn will actually be decrementing fp by
       STARTING_FRAME_OFFSET.  */
    emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

#if !HARD_FRAME_POINTER_IS_ARG_POINTER
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
         frame pointer, we don't need to restore it.  We assume here
         that if such an elimination is present, it can always be used.
         This is the case on all known machines; if we don't make this
         assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
        if (elim_regs[i].from == ARG_POINTER_REGNUM
            && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
          break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
        {
          /* Now restore our arg pointer from the address at which it
             was saved in our stack frame.  */
          emit_move_insn (crtl->args.internal_arg_pointer,
                          copy_to_reg (get_arg_pointer_save_area ()));
        }
    }
#endif

#ifdef HAVE_builtin_setjmp_receiver
  if (receiver_label != NULL && HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (receiver_label));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
    if (HAVE_nonlocal_goto_receiver)
      emit_insn (gen_nonlocal_goto_receiver ());
    else
#endif
      { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  Similarly, we must block
     (frame-related) register values to be used across this code.  */
  emit_insn (gen_blockage ());
}

/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack, insn, last;
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    emit_insn (gen_builtin_longjmp (buf_addr));
  else
#endif
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
                                               GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
                                                   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
         from expand_goto in stmt.c; see there for detailed comments.  */
#ifdef HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
        /* We have to pass a value to the nonlocal_goto pattern that will
           get copied into the static_chain pointer, but it does not matter
           what that value is, because builtin_setjmp does not use it.  */
        emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
      else
#endif
        {
          lab = copy_to_reg (lab);

          emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
          emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

          emit_move_insn (hard_frame_pointer_rtx, fp);
          emit_stack_restore (SAVE_NONLOCAL, stack);

          emit_use (hard_frame_pointer_rtx);
          emit_use (stack_pointer_rtx);
          emit_indirect_jump (lab);
        }
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
        {
          add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
          break;
        }
      else if (CALL_P (insn))
        break;
    }
}

static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis; otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = false;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  do
    {
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
        {
        case 0:
          /* This signifies an ellipsis; any further arguments are all OK.  */
          res = true;
          goto end;
        case VOID_TYPE:
          /* This signifies an endlink, if no arguments remain, return
             true, otherwise return false.  */
          res = !more_const_call_expr_args_p (&iter);
          goto end;
        default:
          /* If no parameters remain or the parameter's code does not
             match the specified code, return false.  Otherwise continue
             checking any remaining arguments.  */
          arg = next_const_call_expr_arg (&iter);
          if (!validate_arg (arg, code))
            goto end;
          break;
        }
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  return res;
}
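
/* Typical uses, as seen in the expanders below:
     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
       accepts exactly two pointer arguments;
     validate_arglist (exp, POINTER_TYPE, 0)
       accepts one pointer argument followed by anything.  */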

/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp, insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
                      plus_constant (Pmode, r_save_area,
                                     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

#ifdef HAVE_nonlocal_goto
  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (HAVE_nonlocal_goto)
    emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
#endif
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
         not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
         conservatively assume that the target function makes use of it.
         The prologue of functions with nonlocal gotos must therefore
         initialize the GP register to the appropriate value, and we
         must then make sure that this value is live at the point
         of the jump.  (Note that this doesn't necessarily apply
         to targets with a nonlocal_goto pattern; they are free
         to implement it in their own way.  Note also that this is
         a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
          && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
        emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
        {
          add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
          break;
        }
      else if (CALL_P (insn))
        break;
    }

  return const0_rtx;
}

/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to correspond to the current
   stack pointer.  */

static void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
                   memory_address
                   (sa_mode,
                    plus_constant (Pmode, buf_addr,
                                   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}

/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
               " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

#ifdef HAVE_prefetch
  if (HAVE_prefetch)
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (CODE_FOR_prefetch, 3, ops))
        return;
    }
#endif

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
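
/* E.g. __builtin_prefetch (p, 1, 3) reaches the target's prefetch
   pattern with write intent and maximal temporal locality, while plain
   __builtin_prefetch (p) gets the documented defaults of 0 (read)
   and 3.  */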

/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not a resolved SAVE_EXPR, MEM_ATTRS can still be derived
     from its expression; for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
         && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
                     build_array_type (char_type_node,
                                       build_range_type (sizetype,
                                                         size_one_node, len)),
                     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
           && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
                                                     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
                         build_array_type (char_type_node,
                                           build_range_type (sizetype,
                                                             size_zero_node,
                                                             NULL)),
                         exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}
\f
/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
         passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
        size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if (FUNCTION_ARG_REGNO_P (regno))
          {
            mode = targetm.calls.get_raw_arg_mode (regno);

            gcc_assert (mode != VOIDmode);

            align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
            if (size % align != 0)
              size = CEIL (size, align) * align;
            size += GET_MODE_SIZE (mode);
            apply_args_mode[regno] = mode;
          }
        else
          {
            apply_args_mode[regno] = VOIDmode;
          }
    }
  return size;
}

/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if (targetm.calls.function_value_regno_p (regno))
          {
            mode = targetm.calls.get_raw_result_mode (regno);

            gcc_assert (mode != VOIDmode);

            align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
            if (size % align != 0)
              size = CEIL (size, align) * align;
            size += GET_MODE_SIZE (mode);
            apply_result_mode[regno] = mode;
          }
        else
          apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
         the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}

#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  enum machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
        mem = adjust_address (result, mode, size);
        savevec[nelts++] = (savep
                            ? gen_rtx_SET (VOIDmode, mem, reg)
                            : gen_rtx_SET (VOIDmode, reg, mem));
        size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */

/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  enum machine_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;

        tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

        emit_move_insn (adjust_address (registers, mode, size), tem);
        size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
#ifdef STACK_GROWS_DOWNWARD
  /* We need the pointer as the caller actually passed the arguments to us,
     not as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  tem
    = force_operand (plus_constant (Pmode, tem, crtl->args.pretend_args_size),
                     NULL_RTX);
#endif
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
                      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
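
/* The block saved above is thus laid out as
     [incoming arg pointer][struct value address, if any][arg registers...],
   which is exactly the order expand_builtin_apply reads it back in.  */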

/* __builtin_apply_args returns a block of memory allocated on
   the stack into which are stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */
1542
1543 static rtx
1544 expand_builtin_apply_args (void)
1545 {
1546 /* Don't do __builtin_apply_args more than once in a function.
1547 Save the result of the first call and reuse it. */
1548 if (apply_args_value != 0)
1549 return apply_args_value;
1550 {
1551 /* When this function is called, it means that registers must be
1552 saved on entry to this function. So we migrate the
1553 call to the first insn of this function. */
1554 rtx temp;
1555 rtx seq;
1556
1557 start_sequence ();
1558 temp = expand_builtin_apply_args_1 ();
1559 seq = get_insns ();
1560 end_sequence ();
1561
1562 apply_args_value = temp;
1563
1564 /* Put the insns after the NOTE that starts the function.
1565 If this is inside a start_sequence, make the outer-level insn
1566 chain current, so the code is placed at the start of the
1567 function. If internal_arg_pointer is a non-virtual pseudo,
1568 it needs to be placed after the function that initializes
1569 that pseudo. */
1570 push_topmost_sequence ();
1571 if (REG_P (crtl->args.internal_arg_pointer)
1572 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1573 emit_insn_before (seq, parm_birth_insn);
1574 else
1575 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1576 pop_topmost_sequence ();
1577 return temp;
1578 }
1579 }
1580
1581 /* Perform an untyped call and save the state required to perform an
1582 untyped return of whatever value was returned by the given function. */
1583
1584 static rtx
1585 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1586 {
1587 int size, align, regno;
1588 enum machine_mode mode;
1589 rtx incoming_args, result, reg, dest, src, call_insn;
1590 rtx old_stack_level = 0;
1591 rtx call_fusage = 0;
1592 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1593
1594 arguments = convert_memory_address (Pmode, arguments);
1595
1596 /* Create a block where the return registers can be saved. */
1597 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1598
1599 /* Fetch the arg pointer from the ARGUMENTS block. */
1600 incoming_args = gen_reg_rtx (Pmode);
1601 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1602 #ifndef STACK_GROWS_DOWNWARD
1603 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1604 incoming_args, 0, OPTAB_LIB_WIDEN);
1605 #endif
1606
1607 /* Push a new argument block and copy the arguments. Do not allow
1608 the (potential) memcpy call below to interfere with our stack
1609 manipulations. */
1610 do_pending_stack_adjust ();
1611 NO_DEFER_POP;
1612
1613 /* Save the stack with nonlocal if available. */
1614 #ifdef HAVE_save_stack_nonlocal
1615 if (HAVE_save_stack_nonlocal)
1616 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1617 else
1618 #endif
1619 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1620
1621 /* Allocate a block of memory onto the stack and copy the memory
1622 arguments to the outgoing arguments address. We can pass TRUE
1623 as the 4th argument because we just saved the stack pointer
1624 and will restore it right after the call. */
1625 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1626
1627 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1628 may have already set current_function_calls_alloca to true.
1629 current_function_calls_alloca won't be set if argsize is zero,
1630 so we have to guarantee need_drap is true here. */
1631 if (SUPPORTS_STACK_ALIGNMENT)
1632 crtl->need_drap = true;
1633
1634 dest = virtual_outgoing_args_rtx;
1635 #ifndef STACK_GROWS_DOWNWARD
1636 if (CONST_INT_P (argsize))
1637 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1638 else
1639 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1640 #endif
1641 dest = gen_rtx_MEM (BLKmode, dest);
1642 set_mem_align (dest, PARM_BOUNDARY);
1643 src = gen_rtx_MEM (BLKmode, incoming_args);
1644 set_mem_align (src, PARM_BOUNDARY);
1645 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1646
1647 /* Refer to the argument block. */
1648 apply_args_size ();
1649 arguments = gen_rtx_MEM (BLKmode, arguments);
1650 set_mem_align (arguments, PARM_BOUNDARY);
1651
1652 /* Walk past the arg-pointer and structure value address. */
1653 size = GET_MODE_SIZE (Pmode);
1654 if (struct_value)
1655 size += GET_MODE_SIZE (Pmode);
1656
1657 /* Restore each of the registers previously saved. Make USE insns
1658 for each of these registers for use in making the call. */
1659 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1660 if ((mode = apply_args_mode[regno]) != VOIDmode)
1661 {
1662 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1663 if (size % align != 0)
1664 size = CEIL (size, align) * align;
1665 reg = gen_rtx_REG (mode, regno);
1666 emit_move_insn (reg, adjust_address (arguments, mode, size));
1667 use_reg (&call_fusage, reg);
1668 size += GET_MODE_SIZE (mode);
1669 }
1670
1671 /* Restore the structure value address unless this is passed as an
1672 "invisible" first argument. */
1673 size = GET_MODE_SIZE (Pmode);
1674 if (struct_value)
1675 {
1676 rtx value = gen_reg_rtx (Pmode);
1677 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1678 emit_move_insn (struct_value, value);
1679 if (REG_P (struct_value))
1680 use_reg (&call_fusage, struct_value);
1681 size += GET_MODE_SIZE (Pmode);
1682 }
1683
1684 /* All arguments and registers used for the call are set up by now! */
1685 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1686
1687 /* Ensure the address is valid. A SYMBOL_REF is already valid, so nothing
1688 needs to be done, and we don't want to load it into a register as an
1689 optimization, because prepare_call_address already did that if needed. */
1690 if (GET_CODE (function) != SYMBOL_REF)
1691 function = memory_address (FUNCTION_MODE, function);
1692
1693 /* Generate the actual call instruction and save the return value. */
1694 #ifdef HAVE_untyped_call
1695 if (HAVE_untyped_call)
1696 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1697 result, result_vector (1, result)));
1698 else
1699 #endif
1700 #ifdef HAVE_call_value
1701 if (HAVE_call_value)
1702 {
1703 rtx valreg = 0;
1704
1705 /* Locate the unique return register. It is not possible to
1706 express a call that sets more than one return register using
1707 call_value; use untyped_call for that. In fact, untyped_call
1708 only needs to save the return registers in the given block. */
1709 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1710 if ((mode = apply_result_mode[regno]) != VOIDmode)
1711 {
1712 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1713
1714 valreg = gen_rtx_REG (mode, regno);
1715 }
1716
1717 emit_call_insn (GEN_CALL_VALUE (valreg,
1718 gen_rtx_MEM (FUNCTION_MODE, function),
1719 const0_rtx, NULL_RTX, const0_rtx));
1720
1721 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1722 }
1723 else
1724 #endif
1725 gcc_unreachable ();
1726
1727 /* Find the CALL insn we just emitted, and attach the register usage
1728 information. */
1729 call_insn = last_call_insn ();
1730 add_function_usage_to (call_insn, call_fusage);
1731
1732 /* Restore the stack. */
1733 #ifdef HAVE_save_stack_nonlocal
1734 if (HAVE_save_stack_nonlocal)
1735 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1736 else
1737 #endif
1738 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1739 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1740
1741 OK_DEFER_POP;
1742
1743 /* Return the address of the result block. */
1744 result = copy_addr_to_reg (XEXP (result, 0));
1745 return convert_memory_address (ptr_mode, result);
1746 }
1747
1748 /* Perform an untyped return. */
1749
1750 static void
1751 expand_builtin_return (rtx result)
1752 {
1753 int size, align, regno;
1754 enum machine_mode mode;
1755 rtx reg;
1756 rtx call_fusage = 0;
1757
1758 result = convert_memory_address (Pmode, result);
1759
1760 apply_result_size ();
1761 result = gen_rtx_MEM (BLKmode, result);
1762
1763 #ifdef HAVE_untyped_return
1764 if (HAVE_untyped_return)
1765 {
1766 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1767 emit_barrier ();
1768 return;
1769 }
1770 #endif
1771
1772 /* Restore the return value and note that each value is used. */
1773 size = 0;
1774 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1775 if ((mode = apply_result_mode[regno]) != VOIDmode)
1776 {
1777 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1778 if (size % align != 0)
1779 size = CEIL (size, align) * align;
1780 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1781 emit_move_insn (reg, adjust_address (result, mode, size));
1782
1783 push_to_sequence (call_fusage);
1784 emit_use (reg);
1785 call_fusage = get_insns ();
1786 end_sequence ();
1787 size += GET_MODE_SIZE (mode);
1788 }
1789
1790 /* Put the USE insns before the return. */
1791 emit_insn (call_fusage);
1792
1793 /* Return whatever value was restored by jumping directly to the end
1794 of the function. */
1795 expand_naked_return ();
1796 }
1797
1798 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1799
1800 static enum type_class
1801 type_to_class (tree type)
1802 {
1803 switch (TREE_CODE (type))
1804 {
1805 case VOID_TYPE: return void_type_class;
1806 case INTEGER_TYPE: return integer_type_class;
1807 case ENUMERAL_TYPE: return enumeral_type_class;
1808 case BOOLEAN_TYPE: return boolean_type_class;
1809 case POINTER_TYPE: return pointer_type_class;
1810 case REFERENCE_TYPE: return reference_type_class;
1811 case OFFSET_TYPE: return offset_type_class;
1812 case REAL_TYPE: return real_type_class;
1813 case COMPLEX_TYPE: return complex_type_class;
1814 case FUNCTION_TYPE: return function_type_class;
1815 case METHOD_TYPE: return method_type_class;
1816 case RECORD_TYPE: return record_type_class;
1817 case UNION_TYPE:
1818 case QUAL_UNION_TYPE: return union_type_class;
1819 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1820 ? string_type_class : array_type_class);
1821 case LANG_TYPE: return lang_type_class;
1822 default: return no_type_class;
1823 }
1824 }
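
/* A sketch of the observable behavior: __builtin_classify_type (42)
   evaluates to integer_type_class, __builtin_classify_type (3.14) to
   real_type_class, and __builtin_classify_type ("abc") to
   pointer_type_class, since an array argument decays to a pointer in
   the call.  The numeric values come from enum type_class.  */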
1825
1826 /* Expand a call EXP to __builtin_classify_type. */
1827
1828 static rtx
1829 expand_builtin_classify_type (tree exp)
1830 {
1831 if (call_expr_nargs (exp))
1832 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1833 return GEN_INT (no_type_class);
1834 }
1835
1836 /* This helper macro, meant to be used in mathfn_built_in below,
1837 determines which among a set of three builtin math functions is
1838 appropriate for a given type mode. The `F' and `L' cases are
1839 automatically generated from the `double' case. */
1840 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1841 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1842 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1843 fcodel = BUILT_IN_MATHFN##L ; break;
1844 /* Similar to above, but appends _R after any F/L suffix. */
1845 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1846 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1847 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1848 fcodel = BUILT_IN_MATHFN##L_R ; break;
1849
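/* For instance, CASE_MATHFN (BUILT_IN_SQRT) expands to:

     case BUILT_IN_SQRT: case BUILT_IN_SQRTF: case BUILT_IN_SQRTL:
       fcode = BUILT_IN_SQRT; fcodef = BUILT_IN_SQRTF;
       fcodel = BUILT_IN_SQRTL; break;  */
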
1850 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
1851 if available. If IMPLICIT is true use the implicit builtin declaration,
1852 otherwise use the explicit declaration. If we can't do the conversion,
1853 return zero. */
1854
1855 static tree
1856 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
1857 {
1858 enum built_in_function fcode, fcodef, fcodel, fcode2;
1859
1860 switch (fn)
1861 {
1862 CASE_MATHFN (BUILT_IN_ACOS)
1863 CASE_MATHFN (BUILT_IN_ACOSH)
1864 CASE_MATHFN (BUILT_IN_ASIN)
1865 CASE_MATHFN (BUILT_IN_ASINH)
1866 CASE_MATHFN (BUILT_IN_ATAN)
1867 CASE_MATHFN (BUILT_IN_ATAN2)
1868 CASE_MATHFN (BUILT_IN_ATANH)
1869 CASE_MATHFN (BUILT_IN_CBRT)
1870 CASE_MATHFN (BUILT_IN_CEIL)
1871 CASE_MATHFN (BUILT_IN_CEXPI)
1872 CASE_MATHFN (BUILT_IN_COPYSIGN)
1873 CASE_MATHFN (BUILT_IN_COS)
1874 CASE_MATHFN (BUILT_IN_COSH)
1875 CASE_MATHFN (BUILT_IN_DREM)
1876 CASE_MATHFN (BUILT_IN_ERF)
1877 CASE_MATHFN (BUILT_IN_ERFC)
1878 CASE_MATHFN (BUILT_IN_EXP)
1879 CASE_MATHFN (BUILT_IN_EXP10)
1880 CASE_MATHFN (BUILT_IN_EXP2)
1881 CASE_MATHFN (BUILT_IN_EXPM1)
1882 CASE_MATHFN (BUILT_IN_FABS)
1883 CASE_MATHFN (BUILT_IN_FDIM)
1884 CASE_MATHFN (BUILT_IN_FLOOR)
1885 CASE_MATHFN (BUILT_IN_FMA)
1886 CASE_MATHFN (BUILT_IN_FMAX)
1887 CASE_MATHFN (BUILT_IN_FMIN)
1888 CASE_MATHFN (BUILT_IN_FMOD)
1889 CASE_MATHFN (BUILT_IN_FREXP)
1890 CASE_MATHFN (BUILT_IN_GAMMA)
1891 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1892 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1893 CASE_MATHFN (BUILT_IN_HYPOT)
1894 CASE_MATHFN (BUILT_IN_ILOGB)
1895 CASE_MATHFN (BUILT_IN_ICEIL)
1896 CASE_MATHFN (BUILT_IN_IFLOOR)
1897 CASE_MATHFN (BUILT_IN_INF)
1898 CASE_MATHFN (BUILT_IN_IRINT)
1899 CASE_MATHFN (BUILT_IN_IROUND)
1900 CASE_MATHFN (BUILT_IN_ISINF)
1901 CASE_MATHFN (BUILT_IN_J0)
1902 CASE_MATHFN (BUILT_IN_J1)
1903 CASE_MATHFN (BUILT_IN_JN)
1904 CASE_MATHFN (BUILT_IN_LCEIL)
1905 CASE_MATHFN (BUILT_IN_LDEXP)
1906 CASE_MATHFN (BUILT_IN_LFLOOR)
1907 CASE_MATHFN (BUILT_IN_LGAMMA)
1908 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1909 CASE_MATHFN (BUILT_IN_LLCEIL)
1910 CASE_MATHFN (BUILT_IN_LLFLOOR)
1911 CASE_MATHFN (BUILT_IN_LLRINT)
1912 CASE_MATHFN (BUILT_IN_LLROUND)
1913 CASE_MATHFN (BUILT_IN_LOG)
1914 CASE_MATHFN (BUILT_IN_LOG10)
1915 CASE_MATHFN (BUILT_IN_LOG1P)
1916 CASE_MATHFN (BUILT_IN_LOG2)
1917 CASE_MATHFN (BUILT_IN_LOGB)
1918 CASE_MATHFN (BUILT_IN_LRINT)
1919 CASE_MATHFN (BUILT_IN_LROUND)
1920 CASE_MATHFN (BUILT_IN_MODF)
1921 CASE_MATHFN (BUILT_IN_NAN)
1922 CASE_MATHFN (BUILT_IN_NANS)
1923 CASE_MATHFN (BUILT_IN_NEARBYINT)
1924 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1925 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1926 CASE_MATHFN (BUILT_IN_POW)
1927 CASE_MATHFN (BUILT_IN_POWI)
1928 CASE_MATHFN (BUILT_IN_POW10)
1929 CASE_MATHFN (BUILT_IN_REMAINDER)
1930 CASE_MATHFN (BUILT_IN_REMQUO)
1931 CASE_MATHFN (BUILT_IN_RINT)
1932 CASE_MATHFN (BUILT_IN_ROUND)
1933 CASE_MATHFN (BUILT_IN_SCALB)
1934 CASE_MATHFN (BUILT_IN_SCALBLN)
1935 CASE_MATHFN (BUILT_IN_SCALBN)
1936 CASE_MATHFN (BUILT_IN_SIGNBIT)
1937 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1938 CASE_MATHFN (BUILT_IN_SIN)
1939 CASE_MATHFN (BUILT_IN_SINCOS)
1940 CASE_MATHFN (BUILT_IN_SINH)
1941 CASE_MATHFN (BUILT_IN_SQRT)
1942 CASE_MATHFN (BUILT_IN_TAN)
1943 CASE_MATHFN (BUILT_IN_TANH)
1944 CASE_MATHFN (BUILT_IN_TGAMMA)
1945 CASE_MATHFN (BUILT_IN_TRUNC)
1946 CASE_MATHFN (BUILT_IN_Y0)
1947 CASE_MATHFN (BUILT_IN_Y1)
1948 CASE_MATHFN (BUILT_IN_YN)
1949
1950 default:
1951 return NULL_TREE;
1952 }
1953
1954 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1955 fcode2 = fcode;
1956 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1957 fcode2 = fcodef;
1958 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1959 fcode2 = fcodel;
1960 else
1961 return NULL_TREE;
1962
1963 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1964 return NULL_TREE;
1965
1966 return builtin_decl_explicit (fcode2);
1967 }
1968
1969 /* Like mathfn_built_in_1(), but always use the implicit builtin declarations. */
1970
1971 tree
1972 mathfn_built_in (tree type, enum built_in_function fn)
1973 {
1974 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1975 }
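
/* For example, mathfn_built_in (float_type_node, BUILT_IN_SQRT) returns
   the declaration of sqrtf, provided sqrtf is implicitly available;
   long_double_type_node would select sqrtl instead, and any type other
   than float, double or long double yields NULL_TREE.  */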
1976
1977 /* If errno must be maintained, expand the RTL to check if the result,
1978 TARGET, of a built-in function call, EXP, is NaN, and if so set
1979 errno to EDOM. */
1980
1981 static void
1982 expand_errno_check (tree exp, rtx target)
1983 {
1984 rtx lab = gen_label_rtx ();
1985
1986 /* Test the result; if it is NaN, set errno=EDOM because
1987 the argument was not in the domain. */
1988 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1989 NULL_RTX, NULL_RTX, lab,
1990 /* The jump is very likely. */
1991 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
1992
1993 #ifdef TARGET_EDOM
1994 /* If this built-in doesn't throw an exception, set errno directly. */
1995 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1996 {
1997 #ifdef GEN_ERRNO_RTX
1998 rtx errno_rtx = GEN_ERRNO_RTX;
1999 #else
2000 rtx errno_rtx
2001 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
2002 #endif
2003 emit_move_insn (errno_rtx,
2004 gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
2005 emit_label (lab);
2006 return;
2007 }
2008 #endif
2009
2010 /* Make sure the library call isn't expanded as a tail call. */
2011 CALL_EXPR_TAILCALL (exp) = 0;
2012
2013 /* We can't set errno=EDOM directly; let the library call do it.
2014 Pop the arguments right away in case the call gets deleted. */
2015 NO_DEFER_POP;
2016 expand_call (exp, target, 0);
2017 OK_DEFER_POP;
2018 emit_label (lab);
2019 }
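
/* The emitted RTL corresponds to this C-level sketch, assuming a
   directly addressable errno (which is target specific):

     y = fn (x);
     if (y != y)        (only a NaN compares unequal to itself)
       errno = EDOM;
*/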
2020
2021 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
2022 Return NULL_RTX if a normal call should be emitted rather than expanding
2023 the function in-line. EXP is the expression that is a call to the builtin
2024 function; if convenient, the result should be placed in TARGET.
2025 SUBTARGET may be used as the target for computing one of EXP's operands. */
2026
2027 static rtx
2028 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
2029 {
2030 optab builtin_optab;
2031 rtx op0, insns;
2032 tree fndecl = get_callee_fndecl (exp);
2033 enum machine_mode mode;
2034 bool errno_set = false;
2035 bool try_widening = false;
2036 tree arg;
2037
2038 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2039 return NULL_RTX;
2040
2041 arg = CALL_EXPR_ARG (exp, 0);
2042
2043 switch (DECL_FUNCTION_CODE (fndecl))
2044 {
2045 CASE_FLT_FN (BUILT_IN_SQRT):
2046 errno_set = ! tree_expr_nonnegative_p (arg);
2047 try_widening = true;
2048 builtin_optab = sqrt_optab;
2049 break;
2050 CASE_FLT_FN (BUILT_IN_EXP):
2051 errno_set = true; builtin_optab = exp_optab; break;
2052 CASE_FLT_FN (BUILT_IN_EXP10):
2053 CASE_FLT_FN (BUILT_IN_POW10):
2054 errno_set = true; builtin_optab = exp10_optab; break;
2055 CASE_FLT_FN (BUILT_IN_EXP2):
2056 errno_set = true; builtin_optab = exp2_optab; break;
2057 CASE_FLT_FN (BUILT_IN_EXPM1):
2058 errno_set = true; builtin_optab = expm1_optab; break;
2059 CASE_FLT_FN (BUILT_IN_LOGB):
2060 errno_set = true; builtin_optab = logb_optab; break;
2061 CASE_FLT_FN (BUILT_IN_LOG):
2062 errno_set = true; builtin_optab = log_optab; break;
2063 CASE_FLT_FN (BUILT_IN_LOG10):
2064 errno_set = true; builtin_optab = log10_optab; break;
2065 CASE_FLT_FN (BUILT_IN_LOG2):
2066 errno_set = true; builtin_optab = log2_optab; break;
2067 CASE_FLT_FN (BUILT_IN_LOG1P):
2068 errno_set = true; builtin_optab = log1p_optab; break;
2069 CASE_FLT_FN (BUILT_IN_ASIN):
2070 builtin_optab = asin_optab; break;
2071 CASE_FLT_FN (BUILT_IN_ACOS):
2072 builtin_optab = acos_optab; break;
2073 CASE_FLT_FN (BUILT_IN_TAN):
2074 builtin_optab = tan_optab; break;
2075 CASE_FLT_FN (BUILT_IN_ATAN):
2076 builtin_optab = atan_optab; break;
2077 CASE_FLT_FN (BUILT_IN_FLOOR):
2078 builtin_optab = floor_optab; break;
2079 CASE_FLT_FN (BUILT_IN_CEIL):
2080 builtin_optab = ceil_optab; break;
2081 CASE_FLT_FN (BUILT_IN_TRUNC):
2082 builtin_optab = btrunc_optab; break;
2083 CASE_FLT_FN (BUILT_IN_ROUND):
2084 builtin_optab = round_optab; break;
2085 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2086 builtin_optab = nearbyint_optab;
2087 if (flag_trapping_math)
2088 break;
2089 /* Else fallthrough and expand as rint. */
2090 CASE_FLT_FN (BUILT_IN_RINT):
2091 builtin_optab = rint_optab; break;
2092 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2093 builtin_optab = significand_optab; break;
2094 default:
2095 gcc_unreachable ();
2096 }
2097
2098 /* Make a suitable register to place result in. */
2099 mode = TYPE_MODE (TREE_TYPE (exp));
2100
2101 if (! flag_errno_math || ! HONOR_NANS (mode))
2102 errno_set = false;
2103
2104 /* Before working hard, check whether the instruction is available, but try
2105 to widen the mode for specific operations. */
2106 if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2107 || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
2108 && (!errno_set || !optimize_insn_for_size_p ()))
2109 {
2110 rtx result = gen_reg_rtx (mode);
2111
2112 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2113 need to expand the argument again. This way, we will not perform
2114 side-effects more than once. */
2115 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2116
2117 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2118
2119 start_sequence ();
2120
2121 /* Compute into RESULT.
2122 Set RESULT to wherever the result comes back. */
2123 result = expand_unop (mode, builtin_optab, op0, result, 0);
2124
2125 if (result != 0)
2126 {
2127 if (errno_set)
2128 expand_errno_check (exp, result);
2129
2130 /* Output the entire sequence. */
2131 insns = get_insns ();
2132 end_sequence ();
2133 emit_insn (insns);
2134 return result;
2135 }
2136
2137 /* If we were unable to expand via the builtin, stop the sequence
2138 (without outputting the insns) and call the library function
2139 with the stabilized argument list. */
2140 end_sequence ();
2141 }
2142
2143 return expand_call (exp, target, target == const0_rtx);
2144 }
2145
2146 /* Expand a call to one of the builtin binary math functions (pow, atan2, fmod, etc.).
2147 Return NULL_RTX if a normal call should be emitted rather than expanding the
2148 function in-line. EXP is the expression that is a call to the builtin
2149 function; if convenient, the result should be placed in TARGET.
2150 SUBTARGET may be used as the target for computing one of EXP's
2151 operands. */
2152
2153 static rtx
2154 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2155 {
2156 optab builtin_optab;
2157 rtx op0, op1, insns, result;
2158 int op1_type = REAL_TYPE;
2159 tree fndecl = get_callee_fndecl (exp);
2160 tree arg0, arg1;
2161 enum machine_mode mode;
2162 bool errno_set = true;
2163
2164 switch (DECL_FUNCTION_CODE (fndecl))
2165 {
2166 CASE_FLT_FN (BUILT_IN_SCALBN):
2167 CASE_FLT_FN (BUILT_IN_SCALBLN):
2168 CASE_FLT_FN (BUILT_IN_LDEXP):
2169 op1_type = INTEGER_TYPE;
2170 default:
2171 break;
2172 }
2173
2174 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2175 return NULL_RTX;
2176
2177 arg0 = CALL_EXPR_ARG (exp, 0);
2178 arg1 = CALL_EXPR_ARG (exp, 1);
2179
2180 switch (DECL_FUNCTION_CODE (fndecl))
2181 {
2182 CASE_FLT_FN (BUILT_IN_POW):
2183 builtin_optab = pow_optab; break;
2184 CASE_FLT_FN (BUILT_IN_ATAN2):
2185 builtin_optab = atan2_optab; break;
2186 CASE_FLT_FN (BUILT_IN_SCALB):
2187 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2188 return 0;
2189 builtin_optab = scalb_optab; break;
2190 CASE_FLT_FN (BUILT_IN_SCALBN):
2191 CASE_FLT_FN (BUILT_IN_SCALBLN):
2192 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2193 return 0;
2194 /* Fall through... */
2195 CASE_FLT_FN (BUILT_IN_LDEXP):
2196 builtin_optab = ldexp_optab; break;
2197 CASE_FLT_FN (BUILT_IN_FMOD):
2198 builtin_optab = fmod_optab; break;
2199 CASE_FLT_FN (BUILT_IN_REMAINDER):
2200 CASE_FLT_FN (BUILT_IN_DREM):
2201 builtin_optab = remainder_optab; break;
2202 default:
2203 gcc_unreachable ();
2204 }
2205
2206 /* Make a suitable register to place result in. */
2207 mode = TYPE_MODE (TREE_TYPE (exp));
2208
2209 /* Before working hard, check whether the instruction is available. */
2210 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2211 return NULL_RTX;
2212
2213 result = gen_reg_rtx (mode);
2214
2215 if (! flag_errno_math || ! HONOR_NANS (mode))
2216 errno_set = false;
2217
2218 if (errno_set && optimize_insn_for_size_p ())
2219 return 0;
2220
2221 /* Always stabilize the argument list. */
2222 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2223 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2224
2225 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2226 op1 = expand_normal (arg1);
2227
2228 start_sequence ();
2229
2230 /* Compute into RESULT.
2231 Set RESULT to wherever the result comes back. */
2232 result = expand_binop (mode, builtin_optab, op0, op1,
2233 result, 0, OPTAB_DIRECT);
2234
2235 /* If we were unable to expand via the builtin, stop the sequence
2236 (without outputting the insns) and call the library function
2237 with the stabilized argument list. */
2238 if (result == 0)
2239 {
2240 end_sequence ();
2241 return expand_call (exp, target, target == const0_rtx);
2242 }
2243
2244 if (errno_set)
2245 expand_errno_check (exp, result);
2246
2247 /* Output the entire sequence. */
2248 insns = get_insns ();
2249 end_sequence ();
2250 emit_insn (insns);
2251
2252 return result;
2253 }
2254
2255 /* Expand a call to the builtin ternary math functions (fma).
2256 Return NULL_RTX if a normal call should be emitted rather than expanding the
2257 function in-line. EXP is the expression that is a call to the builtin
2258 function; if convenient, the result should be placed in TARGET.
2259 SUBTARGET may be used as the target for computing one of EXP's
2260 operands. */
2261
2262 static rtx
2263 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2264 {
2265 optab builtin_optab;
2266 rtx op0, op1, op2, insns, result;
2267 tree fndecl = get_callee_fndecl (exp);
2268 tree arg0, arg1, arg2;
2269 enum machine_mode mode;
2270
2271 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2272 return NULL_RTX;
2273
2274 arg0 = CALL_EXPR_ARG (exp, 0);
2275 arg1 = CALL_EXPR_ARG (exp, 1);
2276 arg2 = CALL_EXPR_ARG (exp, 2);
2277
2278 switch (DECL_FUNCTION_CODE (fndecl))
2279 {
2280 CASE_FLT_FN (BUILT_IN_FMA):
2281 builtin_optab = fma_optab; break;
2282 default:
2283 gcc_unreachable ();
2284 }
2285
2286 /* Make a suitable register to place result in. */
2287 mode = TYPE_MODE (TREE_TYPE (exp));
2288
2289 /* Before working hard, check whether the instruction is available. */
2290 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2291 return NULL_RTX;
2292
2293 result = gen_reg_rtx (mode);
2294
2295 /* Always stabilize the argument list. */
2296 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2297 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2298 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2299
2300 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2301 op1 = expand_normal (arg1);
2302 op2 = expand_normal (arg2);
2303
2304 start_sequence ();
2305
2306 /* Compute into RESULT.
2307 Set RESULT to wherever the result comes back. */
2308 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2309 result, 0);
2310
2311 /* If we were unable to expand via the builtin, stop the sequence
2312 (without outputting the insns) and call the library function
2313 with the stabilized argument list. */
2314 if (result == 0)
2315 {
2316 end_sequence ();
2317 return expand_call (exp, target, target == const0_rtx);
2318 }
2319
2320 /* Output the entire sequence. */
2321 insns = get_insns ();
2322 end_sequence ();
2323 emit_insn (insns);
2324
2325 return result;
2326 }
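
/* E.g. __builtin_fma (a, b, c) computes a * b + c with a single rounding;
   when fma_optab has no handler for the mode, the code above falls back
   to a normal call to the fma library function.  */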
2327
2328 /* Expand a call to the builtin sin and cos math functions.
2329 Return NULL_RTX if a normal call should be emitted rather than expanding the
2330 function in-line. EXP is the expression that is a call to the builtin
2331 function; if convenient, the result should be placed in TARGET.
2332 SUBTARGET may be used as the target for computing one of EXP's
2333 operands. */
2334
2335 static rtx
2336 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2337 {
2338 optab builtin_optab;
2339 rtx op0, insns;
2340 tree fndecl = get_callee_fndecl (exp);
2341 enum machine_mode mode;
2342 tree arg;
2343
2344 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2345 return NULL_RTX;
2346
2347 arg = CALL_EXPR_ARG (exp, 0);
2348
2349 switch (DECL_FUNCTION_CODE (fndecl))
2350 {
2351 CASE_FLT_FN (BUILT_IN_SIN):
2352 CASE_FLT_FN (BUILT_IN_COS):
2353 builtin_optab = sincos_optab; break;
2354 default:
2355 gcc_unreachable ();
2356 }
2357
2358 /* Make a suitable register to place result in. */
2359 mode = TYPE_MODE (TREE_TYPE (exp));
2360
2361 /* Check if the sincos insn is available; otherwise fall back
2362 to the sin or cos insn. */
2363 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2364 switch (DECL_FUNCTION_CODE (fndecl))
2365 {
2366 CASE_FLT_FN (BUILT_IN_SIN):
2367 builtin_optab = sin_optab; break;
2368 CASE_FLT_FN (BUILT_IN_COS):
2369 builtin_optab = cos_optab; break;
2370 default:
2371 gcc_unreachable ();
2372 }
2373
2374 /* Before working hard, check whether the instruction is available. */
2375 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2376 {
2377 rtx result = gen_reg_rtx (mode);
2378
2379 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2380 need to expand the argument again. This way, we will not perform
2381 side-effects more than once. */
2382 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2383
2384 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2385
2386 start_sequence ();
2387
2388 /* Compute into RESULT.
2389 Set RESULT to wherever the result comes back. */
2390 if (builtin_optab == sincos_optab)
2391 {
2392 int ok;
2393
2394 switch (DECL_FUNCTION_CODE (fndecl))
2395 {
2396 CASE_FLT_FN (BUILT_IN_SIN):
2397 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2398 break;
2399 CASE_FLT_FN (BUILT_IN_COS):
2400 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2401 break;
2402 default:
2403 gcc_unreachable ();
2404 }
2405 gcc_assert (ok);
2406 }
2407 else
2408 result = expand_unop (mode, builtin_optab, op0, result, 0);
2409
2410 if (result != 0)
2411 {
2412 /* Output the entire sequence. */
2413 insns = get_insns ();
2414 end_sequence ();
2415 emit_insn (insns);
2416 return result;
2417 }
2418
2419 /* If we were unable to expand via the builtin, stop the sequence
2420 (without outputting the insns) and call the library function
2421 with the stabilized argument list. */
2422 end_sequence ();
2423 }
2424
2425 return expand_call (exp, target, target == const0_rtx);
2426 }
2427
2428 /* Given an interclass math builtin decl FNDECL and its argument ARG
2429 return an RTL instruction code that implements the functionality.
2430 If that isn't possible or available return CODE_FOR_nothing. */
2431
2432 static enum insn_code
2433 interclass_mathfn_icode (tree arg, tree fndecl)
2434 {
2435 bool errno_set = false;
2436 optab builtin_optab = unknown_optab;
2437 enum machine_mode mode;
2438
2439 switch (DECL_FUNCTION_CODE (fndecl))
2440 {
2441 CASE_FLT_FN (BUILT_IN_ILOGB):
2442 errno_set = true; builtin_optab = ilogb_optab; break;
2443 CASE_FLT_FN (BUILT_IN_ISINF):
2444 builtin_optab = isinf_optab; break;
2445 case BUILT_IN_ISNORMAL:
2446 case BUILT_IN_ISFINITE:
2447 CASE_FLT_FN (BUILT_IN_FINITE):
2448 case BUILT_IN_FINITED32:
2449 case BUILT_IN_FINITED64:
2450 case BUILT_IN_FINITED128:
2451 case BUILT_IN_ISINFD32:
2452 case BUILT_IN_ISINFD64:
2453 case BUILT_IN_ISINFD128:
2454 /* These builtins have no optabs (yet). */
2455 break;
2456 default:
2457 gcc_unreachable ();
2458 }
2459
2460 /* There's no easy way to detect the case we need to set EDOM. */
2461 if (flag_errno_math && errno_set)
2462 return CODE_FOR_nothing;
2463
2464 /* Optab mode depends on the mode of the input argument. */
2465 mode = TYPE_MODE (TREE_TYPE (arg));
2466
2467 if (builtin_optab)
2468 return optab_handler (builtin_optab, mode);
2469 return CODE_FOR_nothing;
2470 }
2471
2472 /* Expand a call to one of the builtin math functions that operate on a
2473 floating point argument and output an integer result (ilogb, isinf,
2474 isnan, etc).
2475 Return 0 if a normal call should be emitted rather than expanding the
2476 function in-line. EXP is the expression that is a call to the builtin
2477 function; if convenient, the result should be placed in TARGET. */
2478
2479 static rtx
2480 expand_builtin_interclass_mathfn (tree exp, rtx target)
2481 {
2482 enum insn_code icode = CODE_FOR_nothing;
2483 rtx op0;
2484 tree fndecl = get_callee_fndecl (exp);
2485 enum machine_mode mode;
2486 tree arg;
2487
2488 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2489 return NULL_RTX;
2490
2491 arg = CALL_EXPR_ARG (exp, 0);
2492 icode = interclass_mathfn_icode (arg, fndecl);
2493 mode = TYPE_MODE (TREE_TYPE (arg));
2494
2495 if (icode != CODE_FOR_nothing)
2496 {
2497 struct expand_operand ops[1];
2498 rtx last = get_last_insn ();
2499 tree orig_arg = arg;
2500
2501 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2502 need to expand the argument again. This way, we will not perform
2503 side-effects more than once. */
2504 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2505
2506 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2507
2508 if (mode != GET_MODE (op0))
2509 op0 = convert_to_mode (mode, op0, 0);
2510
2511 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2512 if (maybe_legitimize_operands (icode, 0, 1, ops)
2513 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2514 return ops[0].value;
2515
2516 delete_insns_since (last);
2517 CALL_EXPR_ARG (exp, 0) = orig_arg;
2518 }
2519
2520 return NULL_RTX;
2521 }
2522
2523 /* Expand a call to the builtin sincos math function.
2524 Return NULL_RTX if a normal call should be emitted rather than expanding the
2525 function in-line. EXP is the expression that is a call to the builtin
2526 function. */
2527
2528 static rtx
2529 expand_builtin_sincos (tree exp)
2530 {
2531 rtx op0, op1, op2, target1, target2;
2532 enum machine_mode mode;
2533 tree arg, sinp, cosp;
2534 int result;
2535 location_t loc = EXPR_LOCATION (exp);
2536 tree alias_type, alias_off;
2537
2538 if (!validate_arglist (exp, REAL_TYPE,
2539 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2540 return NULL_RTX;
2541
2542 arg = CALL_EXPR_ARG (exp, 0);
2543 sinp = CALL_EXPR_ARG (exp, 1);
2544 cosp = CALL_EXPR_ARG (exp, 2);
2545
2546 /* Make a suitable register to place result in. */
2547 mode = TYPE_MODE (TREE_TYPE (arg));
2548
2549 /* Check if sincos insn is available, otherwise emit the call. */
2550 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2551 return NULL_RTX;
2552
2553 target1 = gen_reg_rtx (mode);
2554 target2 = gen_reg_rtx (mode);
2555
2556 op0 = expand_normal (arg);
2557 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2558 alias_off = build_int_cst (alias_type, 0);
2559 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2560 sinp, alias_off));
2561 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2562 cosp, alias_off));
2563
2564 /* Compute into target1 and target2.
2565 Set TARGET to wherever the result comes back. */
2566 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2567 gcc_assert (result);
2568
2569 /* Move target1 and target2 to the memory locations indicated
2570 by op1 and op2. */
2571 emit_move_insn (op1, target1);
2572 emit_move_insn (op2, target2);
2573
2574 return const0_rtx;
2575 }
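
/* A usage sketch: for source such as

     double s, c;
     sincos (x, &s, &c);

   the expansion above computes both results with one sincos insn and
   stores them through the SINP and COSP pointers; const0_rtx is returned
   because the library function's return type is void.  */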
2576
2577 /* Expand a call to the internal cexpi builtin to the sincos math function.
2578 EXP is the expression that is a call to the builtin function; if convenient,
2579 the result should be placed in TARGET. */
2580
2581 static rtx
2582 expand_builtin_cexpi (tree exp, rtx target)
2583 {
2584 tree fndecl = get_callee_fndecl (exp);
2585 tree arg, type;
2586 enum machine_mode mode;
2587 rtx op0, op1, op2;
2588 location_t loc = EXPR_LOCATION (exp);
2589
2590 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2591 return NULL_RTX;
2592
2593 arg = CALL_EXPR_ARG (exp, 0);
2594 type = TREE_TYPE (arg);
2595 mode = TYPE_MODE (TREE_TYPE (arg));
2596
2597 /* Try expanding via a sincos optab, falling back to emitting a libcall
2598 to sincos or cexp. One of the two is certain to be available, because
2599 cexpi is only generated when sincos or cexp exists. */
2600 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2601 {
2602 op1 = gen_reg_rtx (mode);
2603 op2 = gen_reg_rtx (mode);
2604
2605 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2606
2607 /* Compute into op1 and op2. */
2608 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2609 }
2610 else if (targetm.libc_has_function (function_sincos))
2611 {
2612 tree call, fn = NULL_TREE;
2613 tree top1, top2;
2614 rtx op1a, op2a;
2615
2616 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2617 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2618 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2619 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2620 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2621 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2622 else
2623 gcc_unreachable ();
2624
2625 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2626 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2627 op1a = copy_addr_to_reg (XEXP (op1, 0));
2628 op2a = copy_addr_to_reg (XEXP (op2, 0));
2629 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2630 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2631
2632 /* Make sure not to fold the sincos call again. */
2633 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2634 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2635 call, 3, arg, top1, top2));
2636 }
2637 else
2638 {
2639 tree call, fn = NULL_TREE, narg;
2640 tree ctype = build_complex_type (type);
2641
2642 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2643 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2644 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2645 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2646 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2647 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2648 else
2649 gcc_unreachable ();
2650
2651 /* If we don't have a decl for cexp create one. This is the
2652 friendliest fallback if the user calls __builtin_cexpi
2653 without full target C99 function support. */
2654 if (fn == NULL_TREE)
2655 {
2656 tree fntype;
2657 const char *name = NULL;
2658
2659 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2660 name = "cexpf";
2661 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2662 name = "cexp";
2663 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2664 name = "cexpl";
2665
2666 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2667 fn = build_fn_decl (name, fntype);
2668 }
2669
2670 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2671 build_real (type, dconst0), arg);
2672
2673 /* Make sure not to fold the cexp call again. */
2674 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2675 return expand_expr (build_call_nary (ctype, call, 1, narg),
2676 target, VOIDmode, EXPAND_NORMAL);
2677 }
2678
2679 /* Now build the proper return type. */
2680 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2681 make_tree (TREE_TYPE (arg), op2),
2682 make_tree (TREE_TYPE (arg), op1)),
2683 target, VOIDmode, EXPAND_NORMAL);
2684 }
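
/* The code above relies on the identity cexpi (x) == cos (x) + sin (x)*i:
   the sincos paths leave the cosine in op2 and the sine in op1, which the
   final COMPLEX_EXPR pairs in that order, while the cexp fallback instead
   evaluates cexp (0 + x*i), the same value.  */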
2685
2686 /* Conveniently construct a function call expression. FNDECL names the
2687 function to be called, N is the number of arguments, and the "..."
2688 parameters are the argument expressions. Unlike build_call_expr,
2689 this doesn't fold the call; hence it will always return a CALL_EXPR. */
2690
2691 static tree
2692 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2693 {
2694 va_list ap;
2695 tree fntype = TREE_TYPE (fndecl);
2696 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2697
2698 va_start (ap, n);
2699 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2700 va_end (ap);
2701 SET_EXPR_LOCATION (fn, loc);
2702 return fn;
2703 }
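
/* For example, expand_builtin_mempcpy_args below uses this helper to
   rewrite mempcpy into memcpy without refolding the new call:

     tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
     tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
                                          dest, src, len);  */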
2704
2705 /* Expand a call to one of the builtin rounding functions gcc defines
2706 as an extension (lfloor and lceil). As these are gcc extensions we
2707 do not need to worry about setting errno to EDOM.
2708 If expanding via the optab fails, lower the expression to (int)(floor(x)).
2709 EXP is the expression that is a call to the builtin function;
2710 if convenient, the result should be placed in TARGET. */
2711
2712 static rtx
2713 expand_builtin_int_roundingfn (tree exp, rtx target)
2714 {
2715 convert_optab builtin_optab;
2716 rtx op0, insns, tmp;
2717 tree fndecl = get_callee_fndecl (exp);
2718 enum built_in_function fallback_fn;
2719 tree fallback_fndecl;
2720 enum machine_mode mode;
2721 tree arg;
2722
2723 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2724 gcc_unreachable ();
2725
2726 arg = CALL_EXPR_ARG (exp, 0);
2727
2728 switch (DECL_FUNCTION_CODE (fndecl))
2729 {
2730 CASE_FLT_FN (BUILT_IN_ICEIL):
2731 CASE_FLT_FN (BUILT_IN_LCEIL):
2732 CASE_FLT_FN (BUILT_IN_LLCEIL):
2733 builtin_optab = lceil_optab;
2734 fallback_fn = BUILT_IN_CEIL;
2735 break;
2736
2737 CASE_FLT_FN (BUILT_IN_IFLOOR):
2738 CASE_FLT_FN (BUILT_IN_LFLOOR):
2739 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2740 builtin_optab = lfloor_optab;
2741 fallback_fn = BUILT_IN_FLOOR;
2742 break;
2743
2744 default:
2745 gcc_unreachable ();
2746 }
2747
2748 /* Make a suitable register to place result in. */
2749 mode = TYPE_MODE (TREE_TYPE (exp));
2750
2751 target = gen_reg_rtx (mode);
2752
2753 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2754 need to expand the argument again. This way, we will not perform
2755 side-effects more than once. */
2756 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2757
2758 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2759
2760 start_sequence ();
2761
2762 /* Compute into TARGET. */
2763 if (expand_sfix_optab (target, op0, builtin_optab))
2764 {
2765 /* Output the entire sequence. */
2766 insns = get_insns ();
2767 end_sequence ();
2768 emit_insn (insns);
2769 return target;
2770 }
2771
2772 /* If we were unable to expand via the builtin, stop the sequence
2773 (without outputting the insns). */
2774 end_sequence ();
2775
2776 /* Fall back to floating point rounding optab. */
2777 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2778
2779 /* For non-C99 targets we may end up without a fallback fndecl here
2780 if the user called __builtin_lfloor directly. In this case emit
2781 a call to the floor/ceil variants nevertheless. This should result
2782 in the best user experience for targets without full C99 support. */
2783 if (fallback_fndecl == NULL_TREE)
2784 {
2785 tree fntype;
2786 const char *name = NULL;
2787
2788 switch (DECL_FUNCTION_CODE (fndecl))
2789 {
2790 case BUILT_IN_ICEIL:
2791 case BUILT_IN_LCEIL:
2792 case BUILT_IN_LLCEIL:
2793 name = "ceil";
2794 break;
2795 case BUILT_IN_ICEILF:
2796 case BUILT_IN_LCEILF:
2797 case BUILT_IN_LLCEILF:
2798 name = "ceilf";
2799 break;
2800 case BUILT_IN_ICEILL:
2801 case BUILT_IN_LCEILL:
2802 case BUILT_IN_LLCEILL:
2803 name = "ceill";
2804 break;
2805 case BUILT_IN_IFLOOR:
2806 case BUILT_IN_LFLOOR:
2807 case BUILT_IN_LLFLOOR:
2808 name = "floor";
2809 break;
2810 case BUILT_IN_IFLOORF:
2811 case BUILT_IN_LFLOORF:
2812 case BUILT_IN_LLFLOORF:
2813 name = "floorf";
2814 break;
2815 case BUILT_IN_IFLOORL:
2816 case BUILT_IN_LFLOORL:
2817 case BUILT_IN_LLFLOORL:
2818 name = "floorl";
2819 break;
2820 default:
2821 gcc_unreachable ();
2822 }
2823
2824 fntype = build_function_type_list (TREE_TYPE (arg),
2825 TREE_TYPE (arg), NULL_TREE);
2826 fallback_fndecl = build_fn_decl (name, fntype);
2827 }
2828
2829 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2830
2831 tmp = expand_normal (exp);
2832 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2833
2834 /* Truncate the result of the floating point optab to an integer
2835 via expand_fix (). */
2836 target = gen_reg_rtx (mode);
2837 expand_fix (target, tmp, 0);
2838
2839 return target;
2840 }
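
/* Conceptually, when the lceil/lfloor optab has no handler, the code
   above lowers e.g. __builtin_lfloor (x) to the equivalent of

     (long) floor (x)

   with the final integer conversion performed by expand_fix ().  */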
2841
2842 /* Expand a call to one of the builtin math functions doing integer
2843 conversion (lrint).
2844 Return 0 if a normal call should be emitted rather than expanding the
2845 function in-line. EXP is the expression that is a call to the builtin
2846 function; if convenient, the result should be placed in TARGET. */
2847
2848 static rtx
2849 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2850 {
2851 convert_optab builtin_optab;
2852 rtx op0, insns;
2853 tree fndecl = get_callee_fndecl (exp);
2854 tree arg;
2855 enum machine_mode mode;
2856 enum built_in_function fallback_fn = BUILT_IN_NONE;
2857
2858 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2859 gcc_unreachable ();
2860
2861 arg = CALL_EXPR_ARG (exp, 0);
2862
2863 switch (DECL_FUNCTION_CODE (fndecl))
2864 {
2865 CASE_FLT_FN (BUILT_IN_IRINT):
2866 fallback_fn = BUILT_IN_LRINT;
2867 /* FALLTHRU */
2868 CASE_FLT_FN (BUILT_IN_LRINT):
2869 CASE_FLT_FN (BUILT_IN_LLRINT):
2870 builtin_optab = lrint_optab;
2871 break;
2872
2873 CASE_FLT_FN (BUILT_IN_IROUND):
2874 fallback_fn = BUILT_IN_LROUND;
2875 /* FALLTHRU */
2876 CASE_FLT_FN (BUILT_IN_LROUND):
2877 CASE_FLT_FN (BUILT_IN_LLROUND):
2878 builtin_optab = lround_optab;
2879 break;
2880
2881 default:
2882 gcc_unreachable ();
2883 }
2884
2885 /* There's no easy way to detect the case we need to set EDOM. */
2886 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2887 return NULL_RTX;
2888
2889 /* Make a suitable register to place result in. */
2890 mode = TYPE_MODE (TREE_TYPE (exp));
2891
2892 /* When errno need not be maintained, try expanding the conversion inline. */
2893 if (!flag_errno_math)
2894 {
2895 rtx result = gen_reg_rtx (mode);
2896
2897 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2898 need to expand the argument again. This way, we will not perform
2899 side-effects more than once. */
2900 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2901
2902 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2903
2904 start_sequence ();
2905
2906 if (expand_sfix_optab (result, op0, builtin_optab))
2907 {
2908 /* Output the entire sequence. */
2909 insns = get_insns ();
2910 end_sequence ();
2911 emit_insn (insns);
2912 return result;
2913 }
2914
2915 /* If we were unable to expand via the builtin, stop the sequence
2916 (without outputting the insns) and call the library function
2917 with the stabilized argument list. */
2918 end_sequence ();
2919 }
2920
2921 if (fallback_fn != BUILT_IN_NONE)
2922 {
2923 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2924 targets, (int) round (x) should never be transformed into
2925 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2926 a call to lround in the hope that the target provides at least some
2927 C99 functions. This should result in the best user experience for
2928 targets without full C99 support. */
2929 tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
2930 fallback_fn, 0);
2931
2932 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2933 fallback_fndecl, 1, arg);
2934
2935 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2936 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2937 return convert_to_mode (mode, target, 0);
2938 }
2939
2940 return expand_call (exp, target, target == const0_rtx);
2941 }
2942
2943 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2944 a normal call should be emitted rather than expanding the function
2945 in-line. EXP is the expression that is a call to the builtin
2946 function; if convenient, the result should be placed in TARGET. */
2947
2948 static rtx
2949 expand_builtin_powi (tree exp, rtx target)
2950 {
2951 tree arg0, arg1;
2952 rtx op0, op1;
2953 enum machine_mode mode;
2954 enum machine_mode mode2;
2955
2956 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2957 return NULL_RTX;
2958
2959 arg0 = CALL_EXPR_ARG (exp, 0);
2960 arg1 = CALL_EXPR_ARG (exp, 1);
2961 mode = TYPE_MODE (TREE_TYPE (exp));
2962
2963 /* Emit a libcall to libgcc. */
2964
2965 /* Mode of the 2nd argument must match that of an int. */
2966 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2967
2968 if (target == NULL_RTX)
2969 target = gen_reg_rtx (mode);
2970
2971 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2972 if (GET_MODE (op0) != mode)
2973 op0 = convert_to_mode (mode, op0, 0);
2974 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2975 if (GET_MODE (op1) != mode2)
2976 op1 = convert_to_mode (mode2, op1, 0);
2977
2978 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2979 target, LCT_CONST, mode, 2,
2980 op0, mode, op1, mode2);
2981
2982 return target;
2983 }
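
/* E.g. __builtin_powi (x, 5) with X a double becomes a call to the libgcc
   routine behind powi_optab (typically __powidf2), after the exponent has
   been converted to int mode.  */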
2984
2985 /* Expand expression EXP, which is a call to the strlen builtin. Return
2986 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
2987 try to get the result in TARGET, if convenient. */
2988
2989 static rtx
2990 expand_builtin_strlen (tree exp, rtx target,
2991 enum machine_mode target_mode)
2992 {
2993 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2994 return NULL_RTX;
2995 else
2996 {
2997 struct expand_operand ops[4];
2998 rtx pat;
2999 tree len;
3000 tree src = CALL_EXPR_ARG (exp, 0);
3001 rtx src_reg, before_strlen;
3002 enum machine_mode insn_mode = target_mode;
3003 enum insn_code icode = CODE_FOR_nothing;
3004 unsigned int align;
3005
3006 /* If the length can be computed at compile-time, return it. */
3007 len = c_strlen (src, 0);
3008 if (len)
3009 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3010
3011 /* If the length can be computed at compile-time and is a constant
3012 integer, but there are side-effects in SRC, evaluate
3013 SRC for its side-effects, then return LEN.
3014 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3015 can be optimized into: i++; x = 3; */
3016 len = c_strlen (src, 1);
3017 if (len && TREE_CODE (len) == INTEGER_CST)
3018 {
3019 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3020 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3021 }
3022
3023 align = get_pointer_alignment (src) / BITS_PER_UNIT;
3024
3025 /* If SRC is not a pointer type, don't do this operation inline. */
3026 if (align == 0)
3027 return NULL_RTX;
3028
3029 /* Bail out if we can't compute strlen in the right mode. */
3030 while (insn_mode != VOIDmode)
3031 {
3032 icode = optab_handler (strlen_optab, insn_mode);
3033 if (icode != CODE_FOR_nothing)
3034 break;
3035
3036 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3037 }
3038 if (insn_mode == VOIDmode)
3039 return NULL_RTX;
3040
3041 /* Make a place to hold the source address. We will not expand
3042 the actual source until we are sure that the expansion will
3043 not fail -- there are trees that cannot be expanded twice. */
3044 src_reg = gen_reg_rtx (Pmode);
3045
3046 /* Mark the beginning of the strlen sequence so we can emit the
3047 source operand later. */
3048 before_strlen = get_last_insn ();
3049
3050 create_output_operand (&ops[0], target, insn_mode);
3051 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3052 create_integer_operand (&ops[2], 0);
3053 create_integer_operand (&ops[3], align);
3054 if (!maybe_expand_insn (icode, 4, ops))
3055 return NULL_RTX;
3056
3057 /* Now that we are assured of success, expand the source. */
3058 start_sequence ();
3059 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3060 if (pat != src_reg)
3061 {
3062 #ifdef POINTERS_EXTEND_UNSIGNED
3063 if (GET_MODE (pat) != Pmode)
3064 pat = convert_to_mode (Pmode, pat,
3065 POINTERS_EXTEND_UNSIGNED);
3066 #endif
3067 emit_move_insn (src_reg, pat);
3068 }
3069 pat = get_insns ();
3070 end_sequence ();
3071
3072 if (before_strlen)
3073 emit_insn_after (pat, before_strlen);
3074 else
3075 emit_insn_before (pat, get_insns ());
3076
3077 /* Return the value in the proper mode for this function. */
3078 if (GET_MODE (ops[0].value) == target_mode)
3079 target = ops[0].value;
3080 else if (target != 0)
3081 convert_move (target, ops[0].value, 0);
3082 else
3083 target = convert_to_mode (target_mode, ops[0].value, 0);
3084
3085 return target;
3086 }
3087 }
3088
3089 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3090 bytes from constant string DATA + OFFSET and return them as a target
3091 constant. */
3092
3093 static rtx
3094 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3095 enum machine_mode mode)
3096 {
3097 const char *str = (const char *) data;
3098
3099 gcc_assert (offset >= 0
3100 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3101 <= strlen (str) + 1));
3102
3103 return c_readstr (str + offset, mode);
3104 }
3105
3106 /* LEN specifies the length of the block of the memcpy/memset operation.
3107 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3108 In some cases we can make a very likely guess on the max size, which
3109 we then store into PROBABLE_MAX_SIZE. */
3110
3111 static void
3112 determine_block_size (tree len, rtx len_rtx,
3113 unsigned HOST_WIDE_INT *min_size,
3114 unsigned HOST_WIDE_INT *max_size,
3115 unsigned HOST_WIDE_INT *probable_max_size)
3116 {
3117 if (CONST_INT_P (len_rtx))
3118 {
3119 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3120 return;
3121 }
3122 else
3123 {
3124 double_int min, max;
3125 enum value_range_type range_type = VR_UNDEFINED;
3126
3127 /* Determine bounds from the type. */
3128 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3129 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3130 else
3131 *min_size = 0;
3132 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3133 *probable_max_size = *max_size
3134 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3135 else
3136 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3137
3138 if (TREE_CODE (len) == SSA_NAME)
3139 range_type = get_range_info (len, &min, &max);
3140 if (range_type == VR_RANGE)
3141 {
3142 if (min.fits_uhwi () && *min_size < min.to_uhwi ())
3143 *min_size = min.to_uhwi ();
3144 if (max.fits_uhwi () && *max_size > max.to_uhwi ())
3145 *probable_max_size = *max_size = max.to_uhwi ();
3146 }
3147 else if (range_type == VR_ANTI_RANGE)
3148 {
3149 /* An anti range 0...N lets us determine that the minimal size is N+1. */
3150 if (min.is_zero ())
3151 {
3152 if ((max + double_int_one).fits_uhwi ())
3153 *min_size = (max + double_int_one).to_uhwi ();
3154 }
3155 /* Code like
3156
3157 int n;
3158 if (n < 100)
3159 memcpy (a, b, n)
3160
3161 produces an anti range allowing negative values of N. We can
3162 still use that information to guess that N is not negative.
3163 */
3164 else if (!max.ule (double_int_one.lshift (30))
3165 && min.fits_uhwi ())
3166 *probable_max_size = min.to_uhwi () - 1;
3167 }
3168 }
3169 gcc_checking_assert (*max_size <=
3170 (unsigned HOST_WIDE_INT)
3171 GET_MODE_MASK (GET_MODE (len_rtx)));
3172 }
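
/* A worked example (sketch): given

     unsigned int n;
     ...
     if (n < 100)
       memcpy (a, b, n);

   LEN_RTX is not constant, and the VR_RANGE [0, 99] recorded for N gives
   *min_size == 0 and *max_size == *probable_max_size == 99; a constant
   length would instead set all three to that constant.  */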
3173
3174 /* Expand a call EXP to the memcpy builtin.
3175 Return NULL_RTX if we failed, the caller should emit a normal call,
3176 otherwise try to get the result in TARGET, if convenient (and in
3177 mode MODE if that's convenient). */
3178
3179 static rtx
3180 expand_builtin_memcpy (tree exp, rtx target)
3181 {
3182 if (!validate_arglist (exp,
3183 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3184 return NULL_RTX;
3185 else
3186 {
3187 tree dest = CALL_EXPR_ARG (exp, 0);
3188 tree src = CALL_EXPR_ARG (exp, 1);
3189 tree len = CALL_EXPR_ARG (exp, 2);
3190 const char *src_str;
3191 unsigned int src_align = get_pointer_alignment (src);
3192 unsigned int dest_align = get_pointer_alignment (dest);
3193 rtx dest_mem, src_mem, dest_addr, len_rtx;
3194 HOST_WIDE_INT expected_size = -1;
3195 unsigned int expected_align = 0;
3196 unsigned HOST_WIDE_INT min_size;
3197 unsigned HOST_WIDE_INT max_size;
3198 unsigned HOST_WIDE_INT probable_max_size;
3199
3200 /* If DEST is not a pointer type, call the normal function. */
3201 if (dest_align == 0)
3202 return NULL_RTX;
3203
3204 /* If SRC is not a pointer type, don't do this
3205 operation in-line. */
3206 if (src_align == 0)
3207 return NULL_RTX;
3208
3209 if (currently_expanding_gimple_stmt)
3210 stringop_block_profile (currently_expanding_gimple_stmt,
3211 &expected_align, &expected_size);
3212
3213 if (expected_align < dest_align)
3214 expected_align = dest_align;
3215 dest_mem = get_memory_rtx (dest, len);
3216 set_mem_align (dest_mem, dest_align);
3217 len_rtx = expand_normal (len);
3218 determine_block_size (len, len_rtx, &min_size, &max_size,
3219 &probable_max_size);
3220 src_str = c_getstr (src);
3221
3222 /* If SRC is a string constant and block move would be done
3223 by pieces, we can avoid loading the string from memory
3224 and only store the computed constants. */
3225 if (src_str
3226 && CONST_INT_P (len_rtx)
3227 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3228 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3229 CONST_CAST (char *, src_str),
3230 dest_align, false))
3231 {
3232 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3233 builtin_memcpy_read_str,
3234 CONST_CAST (char *, src_str),
3235 dest_align, false, 0);
3236 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3237 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3238 return dest_mem;
3239 }
3240
3241 src_mem = get_memory_rtx (src, len);
3242 set_mem_align (src_mem, src_align);
3243
3244 /* Copy word part most expediently. */
3245 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3246 CALL_EXPR_TAILCALL (exp)
3247 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3248 expected_align, expected_size,
3249 min_size, max_size, probable_max_size);
3250
3251 if (dest_addr == 0)
3252 {
3253 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3254 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3255 }
3256 return dest_addr;
3257 }
3258 }
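
/* For instance, memcpy (buf, "hi", 3) with sufficiently aligned BUF takes
   the store_by_pieces path above: the bytes are materialized as immediate
   constants, so "hi" is never loaded from memory, much as if we had written

     buf[0] = 'h'; buf[1] = 'i'; buf[2] = '\0';
*/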
3259
3260 /* Expand a call EXP to the mempcpy builtin.
3261 Return NULL_RTX if we failed; the caller should emit a normal call,
3262 otherwise try to get the result in TARGET, if convenient (and in
3263 mode MODE if that's convenient). If ENDP is 0 return the
3264 destination pointer, if ENDP is 1 return the end pointer ala
3265 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3266 stpcpy. */
3267
3268 static rtx
3269 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3270 {
3271 if (!validate_arglist (exp,
3272 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3273 return NULL_RTX;
3274 else
3275 {
3276 tree dest = CALL_EXPR_ARG (exp, 0);
3277 tree src = CALL_EXPR_ARG (exp, 1);
3278 tree len = CALL_EXPR_ARG (exp, 2);
3279 return expand_builtin_mempcpy_args (dest, src, len,
3280 target, mode, /*endp=*/ 1);
3281 }
3282 }
3283
3284 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3285 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3286 so that this can also be called without constructing an actual CALL_EXPR.
3287 The other arguments and return value are the same as for
3288 expand_builtin_mempcpy. */
3289
3290 static rtx
3291 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3292 rtx target, enum machine_mode mode, int endp)
3293 {
3294 /* If return value is ignored, transform mempcpy into memcpy. */
3295 if (target == const0_rtx && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3296 {
3297 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3298 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3299 dest, src, len);
3300 return expand_expr (result, target, mode, EXPAND_NORMAL);
3301 }
3302 else
3303 {
3304 const char *src_str;
3305 unsigned int src_align = get_pointer_alignment (src);
3306 unsigned int dest_align = get_pointer_alignment (dest);
3307 rtx dest_mem, src_mem, len_rtx;
3308
3309 /* If either SRC or DEST is not a pointer type, don't do this
3310 operation in-line. */
3311 if (dest_align == 0 || src_align == 0)
3312 return NULL_RTX;
3313
3314 /* If LEN is not constant, call the normal function. */
3315 if (! tree_fits_uhwi_p (len))
3316 return NULL_RTX;
3317
3318 len_rtx = expand_normal (len);
3319 src_str = c_getstr (src);
3320
3321 /* If SRC is a string constant and the block move would be done
3322 by pieces, we can avoid loading the string from memory
3323 and only store the computed constants. */
3324 if (src_str
3325 && CONST_INT_P (len_rtx)
3326 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3327 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3328 CONST_CAST (char *, src_str),
3329 dest_align, false))
3330 {
3331 dest_mem = get_memory_rtx (dest, len);
3332 set_mem_align (dest_mem, dest_align);
3333 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3334 builtin_memcpy_read_str,
3335 CONST_CAST (char *, src_str),
3336 dest_align, false, endp);
3337 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3338 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3339 return dest_mem;
3340 }
3341
3342 if (CONST_INT_P (len_rtx)
3343 && can_move_by_pieces (INTVAL (len_rtx),
3344 MIN (dest_align, src_align)))
3345 {
3346 dest_mem = get_memory_rtx (dest, len);
3347 set_mem_align (dest_mem, dest_align);
3348 src_mem = get_memory_rtx (src, len);
3349 set_mem_align (src_mem, src_align);
3350 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3351 MIN (dest_align, src_align), endp);
3352 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3353 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3354 return dest_mem;
3355 }
3356
3357 return NULL_RTX;
3358 }
3359 }
3360
3361 #ifndef HAVE_movstr
3362 # define HAVE_movstr 0
3363 # define CODE_FOR_movstr CODE_FOR_nothing
3364 #endif
3365
3366 /* Expand into a movstr instruction, if one is available. Return NULL_RTX
3367 if we failed; the caller should emit a normal call. Otherwise try to
3368 get the result in TARGET, if convenient. If ENDP is 0 return the
3369 destination pointer, if ENDP is 1 return the end pointer ala
3370 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3371 stpcpy. */
3372
3373 static rtx
3374 expand_movstr (tree dest, tree src, rtx target, int endp)
3375 {
3376 struct expand_operand ops[3];
3377 rtx dest_mem;
3378 rtx src_mem;
3379
3380 if (!HAVE_movstr)
3381 return NULL_RTX;
3382
3383 dest_mem = get_memory_rtx (dest, NULL);
3384 src_mem = get_memory_rtx (src, NULL);
3385 if (!endp)
3386 {
3387 target = force_reg (Pmode, XEXP (dest_mem, 0));
3388 dest_mem = replace_equiv_address (dest_mem, target);
3389 }
3390
3391 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3392 create_fixed_operand (&ops[1], dest_mem);
3393 create_fixed_operand (&ops[2], src_mem);
3394 if (!maybe_expand_insn (CODE_FOR_movstr, 3, ops))
3395 return NULL_RTX;
3396
3397 if (endp && target != const0_rtx)
3398 {
3399 target = ops[0].value;
3400 /* movstr is supposed to set end to the address of the NUL
3401 terminator. If the caller requested a mempcpy-like return value,
3402 adjust it. */
3403 if (endp == 1)
3404 {
3405 rtx tem = plus_constant (GET_MODE (target),
3406 gen_lowpart (GET_MODE (target), target), 1);
3407 emit_move_insn (target, force_operand (tem, NULL_RTX));
3408 }
3409 }
3410 return target;
3411 }
3412
3413 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3414 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3415 try to get the result in TARGET, if convenient (and in mode MODE if that's
3416 convenient). */
3417
3418 static rtx
3419 expand_builtin_strcpy (tree exp, rtx target)
3420 {
3421 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3422 {
3423 tree dest = CALL_EXPR_ARG (exp, 0);
3424 tree src = CALL_EXPR_ARG (exp, 1);
3425 return expand_builtin_strcpy_args (dest, src, target);
3426 }
3427 return NULL_RTX;
3428 }
3429
3430 /* Helper function to do the actual work for expand_builtin_strcpy. The
3431 arguments to the builtin_strcpy call DEST and SRC are broken out
3432 so that this can also be called without constructing an actual CALL_EXPR.
3433 The other arguments and return value are the same as for
3434 expand_builtin_strcpy. */
3435
3436 static rtx
3437 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3438 {
3439 return expand_movstr (dest, src, target, /*endp=*/0);
3440 }
3441
3442 /* Expand a call EXP to the stpcpy builtin.
3443 Return NULL_RTX if we failed; the caller should emit a normal call.
3444 Otherwise try to get the result in TARGET, if convenient (and in
3445 mode MODE if that's convenient). */
3446
3447 static rtx
3448 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3449 {
3450 tree dst, src;
3451 location_t loc = EXPR_LOCATION (exp);
3452
3453 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3454 return NULL_RTX;
3455
3456 dst = CALL_EXPR_ARG (exp, 0);
3457 src = CALL_EXPR_ARG (exp, 1);
3458
3459 /* If the return value is ignored, transform stpcpy into strcpy. */
3460 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3461 {
3462 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3463 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3464 return expand_expr (result, target, mode, EXPAND_NORMAL);
3465 }
3466 else
3467 {
3468 tree len, lenp1;
3469 rtx ret;
3470
3471 /* Ensure we get an actual string whose length can be evaluated at
3472 compile-time, not an expression containing a string. This is
3473 because the latter will potentially produce pessimized code
3474 when used to produce the return value. */
3475 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3476 return expand_movstr (dst, src, target, /*endp=*/2);
3477
3478 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3479 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3480 target, mode, /*endp=*/2);
3481
3482 if (ret)
3483 return ret;
3484
3485 if (TREE_CODE (len) == INTEGER_CST)
3486 {
3487 rtx len_rtx = expand_normal (len);
3488
3489 if (CONST_INT_P (len_rtx))
3490 {
3491 ret = expand_builtin_strcpy_args (dst, src, target);
3492
3493 if (ret)
3494 {
3495 if (! target)
3496 {
3497 if (mode != VOIDmode)
3498 target = gen_reg_rtx (mode);
3499 else
3500 target = gen_reg_rtx (GET_MODE (ret));
3501 }
3502 if (GET_MODE (target) != GET_MODE (ret))
3503 ret = gen_lowpart (GET_MODE (target), ret);
3504
3505 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3506 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3507 gcc_assert (ret);
3508
3509 return target;
3510 }
3511 }
3512 }
3513
3514 return expand_movstr (dst, src, target, /*endp=*/2);
3515 }
3516 }
3517
3518 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3519 bytes from constant string DATA + OFFSET and return it as target
3520 constant. */
3521
3522 rtx
3523 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3524 enum machine_mode mode)
3525 {
3526 const char *str = (const char *) data;
3527
3528 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3529 return const0_rtx;
3530
3531 return c_readstr (str + offset, mode);
3532 }
3533
3534 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3535 NULL_RTX if we failed; the caller should emit a normal call. */
3536
3537 static rtx
3538 expand_builtin_strncpy (tree exp, rtx target)
3539 {
3540 location_t loc = EXPR_LOCATION (exp);
3541
3542 if (validate_arglist (exp,
3543 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3544 {
3545 tree dest = CALL_EXPR_ARG (exp, 0);
3546 tree src = CALL_EXPR_ARG (exp, 1);
3547 tree len = CALL_EXPR_ARG (exp, 2);
3548 tree slen = c_strlen (src, 1);
3549
3550 /* We must be passed a constant LEN and a SRC whose length is constant. */
3551 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3552 return NULL_RTX;
3553
3554 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3555
3556 /* We're required to pad with trailing zeros if the requested
3557 len is greater than strlen(s2)+1. In that case try to
3558 use store_by_pieces; if that fails, punt. */
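/* For instance (an illustrative sketch, not from this file):
   strncpy (buf, "ab", 5) must store 'a', 'b', '\0', '\0', '\0';
   with LEN (5) greater than strlen (SRC) + 1 (3), the zero padding
   comes from builtin_strncpy_read_str returning const0_rtx for
   offsets past the end of the string. */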
3559 if (tree_int_cst_lt (slen, len))
3560 {
3561 unsigned int dest_align = get_pointer_alignment (dest);
3562 const char *p = c_getstr (src);
3563 rtx dest_mem;
3564
3565 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3566 || !can_store_by_pieces (tree_to_uhwi (len),
3567 builtin_strncpy_read_str,
3568 CONST_CAST (char *, p),
3569 dest_align, false))
3570 return NULL_RTX;
3571
3572 dest_mem = get_memory_rtx (dest, len);
3573 store_by_pieces (dest_mem, tree_to_uhwi (len),
3574 builtin_strncpy_read_str,
3575 CONST_CAST (char *, p), dest_align, false, 0);
3576 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3577 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3578 return dest_mem;
3579 }
3580 }
3581 return NULL_RTX;
3582 }
3583
3584 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3585 bytes from constant string DATA + OFFSET and return it as target
3586 constant. */
3587
3588 rtx
3589 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3590 enum machine_mode mode)
3591 {
3592 const char *c = (const char *) data;
3593 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3594
3595 memset (p, *c, GET_MODE_SIZE (mode));
3596
3597 return c_readstr (p, mode);
3598 }
3599
3600 /* Callback routine for store_by_pieces. Return the RTL of a register
3601 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3602 char value given in the RTL register data. For example, if mode is
3603 4 bytes wide, return the RTL for 0x01010101*data. */
3604
3605 static rtx
3606 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3607 enum machine_mode mode)
3608 {
3609 rtx target, coeff;
3610 size_t size;
3611 char *p;
3612
3613 size = GET_MODE_SIZE (mode);
3614 if (size == 1)
3615 return (rtx) data;
3616
3617 p = XALLOCAVEC (char, size);
3618 memset (p, 1, size);
3619 coeff = c_readstr (p, mode);
3620
3621 target = convert_to_mode (mode, (rtx) data, 1);
3622 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3623 return force_reg (mode, target);
3624 }
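/* A worked instance of the replication trick above (illustrative,
   assuming 4-byte SImode and a runtime byte value of 0xab):

     coeff  = 0x01010101          (c_readstr on four 0x01 bytes)
     target = 0xab * 0x01010101 == 0xabababab

   i.e. GET_MODE_SIZE (MODE) copies of the byte, which is what
   store_by_pieces needs.  */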
3625
3626 /* Expand expression EXP, which is a call to the memset builtin. Return
3627 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3628 try to get the result in TARGET, if convenient (and in mode MODE if that's
3629 convenient). */
3630
3631 static rtx
3632 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3633 {
3634 if (!validate_arglist (exp,
3635 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3636 return NULL_RTX;
3637 else
3638 {
3639 tree dest = CALL_EXPR_ARG (exp, 0);
3640 tree val = CALL_EXPR_ARG (exp, 1);
3641 tree len = CALL_EXPR_ARG (exp, 2);
3642 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3643 }
3644 }
3645
3646 /* Helper function to do the actual work for expand_builtin_memset. The
3647 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3648 so that this can also be called without constructing an actual CALL_EXPR.
3649 The other arguments and return value are the same as for
3650 expand_builtin_memset. */
3651
3652 static rtx
3653 expand_builtin_memset_args (tree dest, tree val, tree len,
3654 rtx target, enum machine_mode mode, tree orig_exp)
3655 {
3656 tree fndecl, fn;
3657 enum built_in_function fcode;
3658 enum machine_mode val_mode;
3659 char c;
3660 unsigned int dest_align;
3661 rtx dest_mem, dest_addr, len_rtx;
3662 HOST_WIDE_INT expected_size = -1;
3663 unsigned int expected_align = 0;
3664 unsigned HOST_WIDE_INT min_size;
3665 unsigned HOST_WIDE_INT max_size;
3666 unsigned HOST_WIDE_INT probable_max_size;
3667
3668 dest_align = get_pointer_alignment (dest);
3669
3670 /* If DEST is not a pointer type, don't do this operation in-line. */
3671 if (dest_align == 0)
3672 return NULL_RTX;
3673
3674 if (currently_expanding_gimple_stmt)
3675 stringop_block_profile (currently_expanding_gimple_stmt,
3676 &expected_align, &expected_size);
3677
3678 if (expected_align < dest_align)
3679 expected_align = dest_align;
3680
3681 /* If the LEN parameter is zero, return DEST. */
3682 if (integer_zerop (len))
3683 {
3684 /* Evaluate and ignore VAL in case it has side-effects. */
3685 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3686 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3687 }
3688
3689 /* Stabilize the arguments in case we fail. */
3690 dest = builtin_save_expr (dest);
3691 val = builtin_save_expr (val);
3692 len = builtin_save_expr (len);
3693
3694 len_rtx = expand_normal (len);
3695 determine_block_size (len, len_rtx, &min_size, &max_size,
3696 &probable_max_size);
3697 dest_mem = get_memory_rtx (dest, len);
3698 val_mode = TYPE_MODE (unsigned_char_type_node);
3699
3700 if (TREE_CODE (val) != INTEGER_CST)
3701 {
3702 rtx val_rtx;
3703
3704 val_rtx = expand_normal (val);
3705 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3706
3707 /* Assume that we can memset by pieces if we can store
3708 the coefficients by pieces (in the required modes).
3709 We can't pass builtin_memset_gen_str as that emits RTL. */
3710 c = 1;
3711 if (tree_fits_uhwi_p (len)
3712 && can_store_by_pieces (tree_to_uhwi (len),
3713 builtin_memset_read_str, &c, dest_align,
3714 true))
3715 {
3716 val_rtx = force_reg (val_mode, val_rtx);
3717 store_by_pieces (dest_mem, tree_to_uhwi (len),
3718 builtin_memset_gen_str, val_rtx, dest_align,
3719 true, 0);
3720 }
3721 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3722 dest_align, expected_align,
3723 expected_size, min_size, max_size,
3724 probable_max_size))
3725 goto do_libcall;
3726
3727 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3728 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3729 return dest_mem;
3730 }
3731
3732 if (target_char_cast (val, &c))
3733 goto do_libcall;
3734
3735 if (c)
3736 {
3737 if (tree_fits_uhwi_p (len)
3738 && can_store_by_pieces (tree_to_uhwi (len),
3739 builtin_memset_read_str, &c, dest_align,
3740 true))
3741 store_by_pieces (dest_mem, tree_to_uhwi (len),
3742 builtin_memset_read_str, &c, dest_align, true, 0);
3743 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3744 gen_int_mode (c, val_mode),
3745 dest_align, expected_align,
3746 expected_size, min_size, max_size,
3747 probable_max_size))
3748 goto do_libcall;
3749
3750 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3751 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3752 return dest_mem;
3753 }
3754
3755 set_mem_align (dest_mem, dest_align);
3756 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3757 CALL_EXPR_TAILCALL (orig_exp)
3758 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3759 expected_align, expected_size,
3760 min_size, max_size,
3761 probable_max_size);
3762
3763 if (dest_addr == 0)
3764 {
3765 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3766 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3767 }
3768
3769 return dest_addr;
3770
3771 do_libcall:
3772 fndecl = get_callee_fndecl (orig_exp);
3773 fcode = DECL_FUNCTION_CODE (fndecl);
3774 if (fcode == BUILT_IN_MEMSET)
3775 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3776 dest, val, len);
3777 else if (fcode == BUILT_IN_BZERO)
3778 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3779 dest, len);
3780 else
3781 gcc_unreachable ();
3782 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3783 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3784 return expand_call (fn, target, target == const0_rtx);
3785 }
3786
3787 /* Expand expression EXP, which is a call to the bzero builtin. Return
3788 NULL_RTX if we failed; the caller should emit a normal call. */
3789
3790 static rtx
3791 expand_builtin_bzero (tree exp)
3792 {
3793 tree dest, size;
3794 location_t loc = EXPR_LOCATION (exp);
3795
3796 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3797 return NULL_RTX;
3798
3799 dest = CALL_EXPR_ARG (exp, 0);
3800 size = CALL_EXPR_ARG (exp, 1);
3801
3802 /* New argument list transforming bzero(ptr x, int y) to
3803 memset(ptr x, int 0, size_t y). This is done this way
3804 so that if it isn't expanded inline, we fall back to
3805 calling bzero instead of memset. */
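/* Sketch of the rewrite (illustrative): a call such as

     bzero (p, n);

   is expanded here exactly as

     memset (p, 0, (size_t) n);

   while ORIG_EXP remains the bzero CALL_EXPR, so the library
   fallback in expand_builtin_memset_args still calls bzero.  */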
3806
3807 return expand_builtin_memset_args (dest, integer_zero_node,
3808 fold_convert_loc (loc,
3809 size_type_node, size),
3810 const0_rtx, VOIDmode, exp);
3811 }
3812
3813 /* Expand expression EXP, which is a call to the memcmp built-in function.
3814 Return NULL_RTX if we failed and the caller should emit a normal call,
3815 otherwise try to get the result in TARGET, if convenient (and in mode
3816 MODE, if that's convenient). */
3817
3818 static rtx
3819 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3820 ATTRIBUTE_UNUSED enum machine_mode mode)
3821 {
3822 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3823
3824 if (!validate_arglist (exp,
3825 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3826 return NULL_RTX;
3827
3828 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3829 implementing memcmp because it will stop if it encounters two
3830 zero bytes. */
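/* An illustrative counterexample: memcmp ("a\0x", "a\0y", 3) must
   compare all three bytes and return nonzero, whereas a cmpstrn-style
   compare would stop at the matching NULs and wrongly report
   equality.  */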
3831 #if defined HAVE_cmpmemsi
3832 {
3833 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3834 rtx result;
3835 rtx insn;
3836 tree arg1 = CALL_EXPR_ARG (exp, 0);
3837 tree arg2 = CALL_EXPR_ARG (exp, 1);
3838 tree len = CALL_EXPR_ARG (exp, 2);
3839
3840 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3841 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3842 enum machine_mode insn_mode;
3843
3844 if (HAVE_cmpmemsi)
3845 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3846 else
3847 return NULL_RTX;
3848
3849 /* If we don't have POINTER_TYPE, call the function. */
3850 if (arg1_align == 0 || arg2_align == 0)
3851 return NULL_RTX;
3852
3853 /* Make a place to write the result of the instruction. */
3854 result = target;
3855 if (! (result != 0
3856 && REG_P (result) && GET_MODE (result) == insn_mode
3857 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3858 result = gen_reg_rtx (insn_mode);
3859
3860 arg1_rtx = get_memory_rtx (arg1, len);
3861 arg2_rtx = get_memory_rtx (arg2, len);
3862 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3863
3864 /* Set MEM_SIZE as appropriate. */
3865 if (CONST_INT_P (arg3_rtx))
3866 {
3867 set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
3868 set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
3869 }
3870
3871 if (HAVE_cmpmemsi)
3872 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3873 GEN_INT (MIN (arg1_align, arg2_align)));
3874 else
3875 gcc_unreachable ();
3876
3877 if (insn)
3878 emit_insn (insn);
3879 else
3880 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
3881 TYPE_MODE (integer_type_node), 3,
3882 XEXP (arg1_rtx, 0), Pmode,
3883 XEXP (arg2_rtx, 0), Pmode,
3884 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
3885 TYPE_UNSIGNED (sizetype)),
3886 TYPE_MODE (sizetype));
3887
3888 /* Return the value in the proper mode for this function. */
3889 mode = TYPE_MODE (TREE_TYPE (exp));
3890 if (GET_MODE (result) == mode)
3891 return result;
3892 else if (target != 0)
3893 {
3894 convert_move (target, result, 0);
3895 return target;
3896 }
3897 else
3898 return convert_to_mode (mode, result, 0);
3899 }
3900 #endif /* HAVE_cmpmemsi. */
3901
3902 return NULL_RTX;
3903 }
3904
3905 /* Expand expression EXP, which is a call to the strcmp builtin. Return
3906 NULL_RTX if we failed; the caller should emit a normal call. Otherwise try to get
3907 the result in TARGET, if convenient. */
3908
3909 static rtx
3910 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
3911 {
3912 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3913 return NULL_RTX;
3914
3915 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
3916 if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
3917 || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
3918 {
3919 rtx arg1_rtx, arg2_rtx;
3920 rtx result, insn = NULL_RTX;
3921 tree fndecl, fn;
3922 tree arg1 = CALL_EXPR_ARG (exp, 0);
3923 tree arg2 = CALL_EXPR_ARG (exp, 1);
3924
3925 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3926 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3927
3928 /* If we don't have POINTER_TYPE, call the function. */
3929 if (arg1_align == 0 || arg2_align == 0)
3930 return NULL_RTX;
3931
3932 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
3933 arg1 = builtin_save_expr (arg1);
3934 arg2 = builtin_save_expr (arg2);
3935
3936 arg1_rtx = get_memory_rtx (arg1, NULL);
3937 arg2_rtx = get_memory_rtx (arg2, NULL);
3938
3939 #ifdef HAVE_cmpstrsi
3940 /* Try to call cmpstrsi. */
3941 if (HAVE_cmpstrsi)
3942 {
3943 enum machine_mode insn_mode
3944 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
3945
3946 /* Make a place to write the result of the instruction. */
3947 result = target;
3948 if (! (result != 0
3949 && REG_P (result) && GET_MODE (result) == insn_mode
3950 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3951 result = gen_reg_rtx (insn_mode);
3952
3953 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
3954 GEN_INT (MIN (arg1_align, arg2_align)));
3955 }
3956 #endif
3957 #ifdef HAVE_cmpstrnsi
3958 /* Try to determine at least one length and call cmpstrnsi. */
3959 if (!insn && HAVE_cmpstrnsi)
3960 {
3961 tree len;
3962 rtx arg3_rtx;
3963
3964 enum machine_mode insn_mode
3965 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
3966 tree len1 = c_strlen (arg1, 1);
3967 tree len2 = c_strlen (arg2, 1);
3968
3969 if (len1)
3970 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3971 if (len2)
3972 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3973
3974 /* If we don't have a constant length for the first, use the length
3975 of the second, if we know it. We don't require a constant for
3976 this case; some cost analysis could be done if both are available
3977 but neither is constant. For now, assume they're equally cheap,
3978 unless one has side effects. If both strings have constant lengths,
3979 use the smaller. */
3980
3981 if (!len1)
3982 len = len2;
3983 else if (!len2)
3984 len = len1;
3985 else if (TREE_SIDE_EFFECTS (len1))
3986 len = len2;
3987 else if (TREE_SIDE_EFFECTS (len2))
3988 len = len1;
3989 else if (TREE_CODE (len1) != INTEGER_CST)
3990 len = len2;
3991 else if (TREE_CODE (len2) != INTEGER_CST)
3992 len = len1;
3993 else if (tree_int_cst_lt (len1, len2))
3994 len = len1;
3995 else
3996 len = len2;
3997
3998 /* If both arguments have side effects, we cannot optimize. */
3999 if (!len || TREE_SIDE_EFFECTS (len))
4000 goto do_libcall;
4001
4002 arg3_rtx = expand_normal (len);
4003
4004 /* Make a place to write the result of the instruction. */
4005 result = target;
4006 if (! (result != 0
4007 && REG_P (result) && GET_MODE (result) == insn_mode
4008 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4009 result = gen_reg_rtx (insn_mode);
4010
4011 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4012 GEN_INT (MIN (arg1_align, arg2_align)));
4013 }
4014 #endif
4015
4016 if (insn)
4017 {
4018 enum machine_mode mode;
4019 emit_insn (insn);
4020
4021 /* Return the value in the proper mode for this function. */
4022 mode = TYPE_MODE (TREE_TYPE (exp));
4023 if (GET_MODE (result) == mode)
4024 return result;
4025 if (target == 0)
4026 return convert_to_mode (mode, result, 0);
4027 convert_move (target, result, 0);
4028 return target;
4029 }
4030
4031 /* Expand the library call ourselves using a stabilized argument
4032 list to avoid re-evaluating the function's arguments twice. */
4033 #ifdef HAVE_cmpstrnsi
4034 do_libcall:
4035 #endif
4036 fndecl = get_callee_fndecl (exp);
4037 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4038 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4039 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4040 return expand_call (fn, target, target == const0_rtx);
4041 }
4042 #endif
4043 return NULL_RTX;
4044 }
4045
4046 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4047 NULL_RTX if we failed; the caller should emit a normal call. Otherwise try to get
4048 the result in TARGET, if convenient. */
4049
4050 static rtx
4051 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4052 ATTRIBUTE_UNUSED enum machine_mode mode)
4053 {
4054 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4055
4056 if (!validate_arglist (exp,
4057 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4058 return NULL_RTX;
4059
4060 /* If c_strlen can determine an expression for one of the string
4061 lengths, and it doesn't have side effects, then emit cmpstrnsi
4062 using length MIN(strlen(string)+1, arg3). */
4063 #ifdef HAVE_cmpstrnsi
4064 if (HAVE_cmpstrnsi)
4065 {
4066 tree len, len1, len2;
4067 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4068 rtx result, insn;
4069 tree fndecl, fn;
4070 tree arg1 = CALL_EXPR_ARG (exp, 0);
4071 tree arg2 = CALL_EXPR_ARG (exp, 1);
4072 tree arg3 = CALL_EXPR_ARG (exp, 2);
4073
4074 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4075 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4076 enum machine_mode insn_mode
4077 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4078
4079 len1 = c_strlen (arg1, 1);
4080 len2 = c_strlen (arg2, 1);
4081
4082 if (len1)
4083 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4084 if (len2)
4085 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4086
4087 /* If we don't have a constant length for the first, use the length
4088 of the second, if we know it. We don't require a constant for
4089 this case; some cost analysis could be done if both are available
4090 but neither is constant. For now, assume they're equally cheap,
4091 unless one has side effects. If both strings have constant lengths,
4092 use the smaller. */
4093
4094 if (!len1)
4095 len = len2;
4096 else if (!len2)
4097 len = len1;
4098 else if (TREE_SIDE_EFFECTS (len1))
4099 len = len2;
4100 else if (TREE_SIDE_EFFECTS (len2))
4101 len = len1;
4102 else if (TREE_CODE (len1) != INTEGER_CST)
4103 len = len2;
4104 else if (TREE_CODE (len2) != INTEGER_CST)
4105 len = len1;
4106 else if (tree_int_cst_lt (len1, len2))
4107 len = len1;
4108 else
4109 len = len2;
4110
4111 /* If both arguments have side effects, we cannot optimize. */
4112 if (!len || TREE_SIDE_EFFECTS (len))
4113 return NULL_RTX;
4114
4115 /* The actual new length parameter is MIN(len,arg3). */
4116 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4117 fold_convert_loc (loc, TREE_TYPE (len), arg3));
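/* For example (illustrative): for strncmp (s, "ab", 5) we have
   strlen ("ab") + 1 == 3, so LEN becomes MIN (3, 5) == 3; comparing
   three bytes is enough because the NUL terminator bounds the
   comparison anyway.  */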
4118
4119 /* If we don't have POINTER_TYPE, call the function. */
4120 if (arg1_align == 0 || arg2_align == 0)
4121 return NULL_RTX;
4122
4123 /* Make a place to write the result of the instruction. */
4124 result = target;
4125 if (! (result != 0
4126 && REG_P (result) && GET_MODE (result) == insn_mode
4127 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4128 result = gen_reg_rtx (insn_mode);
4129
4130 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4131 arg1 = builtin_save_expr (arg1);
4132 arg2 = builtin_save_expr (arg2);
4133 len = builtin_save_expr (len);
4134
4135 arg1_rtx = get_memory_rtx (arg1, len);
4136 arg2_rtx = get_memory_rtx (arg2, len);
4137 arg3_rtx = expand_normal (len);
4138 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4139 GEN_INT (MIN (arg1_align, arg2_align)));
4140 if (insn)
4141 {
4142 emit_insn (insn);
4143
4144 /* Return the value in the proper mode for this function. */
4145 mode = TYPE_MODE (TREE_TYPE (exp));
4146 if (GET_MODE (result) == mode)
4147 return result;
4148 if (target == 0)
4149 return convert_to_mode (mode, result, 0);
4150 convert_move (target, result, 0);
4151 return target;
4152 }
4153
4154 /* Expand the library call ourselves using a stabilized argument
4155 list to avoid re-evaluating the function's arguments twice. */
4156 fndecl = get_callee_fndecl (exp);
4157 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4158 arg1, arg2, len);
4159 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4160 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4161 return expand_call (fn, target, target == const0_rtx);
4162 }
4163 #endif
4164 return NULL_RTX;
4165 }
4166
4167 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4168 if that's convenient. */
4169
4170 rtx
4171 expand_builtin_saveregs (void)
4172 {
4173 rtx val, seq;
4174
4175 /* Don't do __builtin_saveregs more than once in a function.
4176 Save the result of the first call and reuse it. */
4177 if (saveregs_value != 0)
4178 return saveregs_value;
4179
4180 /* When this function is called, it means that registers must be
4181 saved on entry to this function. So we migrate the call to the
4182 first insn of this function. */
4183
4184 start_sequence ();
4185
4186 /* Do whatever the machine needs done in this case. */
4187 val = targetm.calls.expand_builtin_saveregs ();
4188
4189 seq = get_insns ();
4190 end_sequence ();
4191
4192 saveregs_value = val;
4193
4194 /* Put the insns after the NOTE that starts the function. If this
4195 is inside a start_sequence, make the outer-level insn chain current, so
4196 the code is placed at the start of the function. */
4197 push_topmost_sequence ();
4198 emit_insn_after (seq, entry_of_function ());
4199 pop_topmost_sequence ();
4200
4201 return val;
4202 }
4203
4204 /* Expand a call to __builtin_next_arg. */
4205
4206 static rtx
4207 expand_builtin_next_arg (void)
4208 {
4209 /* Checking arguments is already done in fold_builtin_next_arg,
4210 which must be called before this function. */
4211 return expand_binop (ptr_mode, add_optab,
4212 crtl->args.internal_arg_pointer,
4213 crtl->args.arg_offset_rtx,
4214 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4215 }
4216
4217 /* Make it easier for the backends by protecting the valist argument
4218 from multiple evaluations. */
4219
4220 static tree
4221 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4222 {
4223 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4224
4225 /* The current way of determining the type of valist is completely
4226 bogus. We should have the information on the va builtin instead. */
4227 if (!vatype)
4228 vatype = targetm.fn_abi_va_list (cfun->decl);
4229
4230 if (TREE_CODE (vatype) == ARRAY_TYPE)
4231 {
4232 if (TREE_SIDE_EFFECTS (valist))
4233 valist = save_expr (valist);
4234
4235 /* For this case, the backends will be expecting a pointer to
4236 vatype, but it's possible we've actually been given an array
4237 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4238 So fix it. */
4239 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4240 {
4241 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4242 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4243 }
4244 }
4245 else
4246 {
4247 tree pt = build_pointer_type (vatype);
4248
4249 if (! needs_lvalue)
4250 {
4251 if (! TREE_SIDE_EFFECTS (valist))
4252 return valist;
4253
4254 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4255 TREE_SIDE_EFFECTS (valist) = 1;
4256 }
4257
4258 if (TREE_SIDE_EFFECTS (valist))
4259 valist = save_expr (valist);
4260 valist = fold_build2_loc (loc, MEM_REF,
4261 vatype, valist, build_int_cst (pt, 0));
4262 }
4263
4264 return valist;
4265 }
4266
4267 /* The "standard" definition of va_list is void*. */
4268
4269 tree
4270 std_build_builtin_va_list (void)
4271 {
4272 return ptr_type_node;
4273 }
4274
4275 /* The "standard" abi va_list is va_list_type_node. */
4276
4277 tree
4278 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4279 {
4280 return va_list_type_node;
4281 }
4282
4283 /* The "standard" type of va_list is va_list_type_node. */
4284
4285 tree
4286 std_canonical_va_list_type (tree type)
4287 {
4288 tree wtype, htype;
4289
4290 if (INDIRECT_REF_P (type))
4291 type = TREE_TYPE (type);
4292 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
4293 type = TREE_TYPE (type);
4294 wtype = va_list_type_node;
4295 htype = type;
4296 /* Treat structure va_list types. */
4297 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4298 htype = TREE_TYPE (htype);
4299 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4300 {
4301 /* If va_list is an array type, the argument may have decayed
4302 to a pointer type, e.g. by being passed to another function.
4303 In that case, unwrap both types so that we can compare the
4304 underlying records. */
4305 if (TREE_CODE (htype) == ARRAY_TYPE
4306 || POINTER_TYPE_P (htype))
4307 {
4308 wtype = TREE_TYPE (wtype);
4309 htype = TREE_TYPE (htype);
4310 }
4311 }
4312 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4313 return va_list_type_node;
4314
4315 return NULL_TREE;
4316 }
4317
4318 /* The "standard" implementation of va_start: just assign `nextarg' to
4319 the variable. */
4320
4321 void
4322 std_expand_builtin_va_start (tree valist, rtx nextarg)
4323 {
4324 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4325 convert_move (va_r, nextarg, 0);
4326 }
4327
4328 /* Expand EXP, a call to __builtin_va_start. */
4329
4330 static rtx
4331 expand_builtin_va_start (tree exp)
4332 {
4333 rtx nextarg;
4334 tree valist;
4335 location_t loc = EXPR_LOCATION (exp);
4336
4337 if (call_expr_nargs (exp) < 2)
4338 {
4339 error_at (loc, "too few arguments to function %<va_start%>");
4340 return const0_rtx;
4341 }
4342
4343 if (fold_builtin_next_arg (exp, true))
4344 return const0_rtx;
4345
4346 nextarg = expand_builtin_next_arg ();
4347 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4348
4349 if (targetm.expand_builtin_va_start)
4350 targetm.expand_builtin_va_start (valist, nextarg);
4351 else
4352 std_expand_builtin_va_start (valist, nextarg);
4353
4354 return const0_rtx;
4355 }
4356
4357 /* Expand EXP, a call to __builtin_va_end. */
4358
4359 static rtx
4360 expand_builtin_va_end (tree exp)
4361 {
4362 tree valist = CALL_EXPR_ARG (exp, 0);
4363
4364 /* Evaluate for side effects, if needed. I hate macros that don't
4365 do that. */
4366 if (TREE_SIDE_EFFECTS (valist))
4367 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4368
4369 return const0_rtx;
4370 }
4371
4372 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4373 builtin rather than just as an assignment in stdarg.h because of the
4374 nastiness of array-type va_list types. */
4375
4376 static rtx
4377 expand_builtin_va_copy (tree exp)
4378 {
4379 tree dst, src, t;
4380 location_t loc = EXPR_LOCATION (exp);
4381
4382 dst = CALL_EXPR_ARG (exp, 0);
4383 src = CALL_EXPR_ARG (exp, 1);
4384
4385 dst = stabilize_va_list_loc (loc, dst, 1);
4386 src = stabilize_va_list_loc (loc, src, 0);
4387
4388 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4389
4390 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4391 {
4392 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4393 TREE_SIDE_EFFECTS (t) = 1;
4394 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4395 }
4396 else
4397 {
4398 rtx dstb, srcb, size;
4399
4400 /* Evaluate to pointers. */
4401 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4402 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4403 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4404 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4405
4406 dstb = convert_memory_address (Pmode, dstb);
4407 srcb = convert_memory_address (Pmode, srcb);
4408
4409 /* "Dereference" to BLKmode memories. */
4410 dstb = gen_rtx_MEM (BLKmode, dstb);
4411 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4412 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4413 srcb = gen_rtx_MEM (BLKmode, srcb);
4414 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4415 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4416
4417 /* Copy. */
4418 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4419 }
4420
4421 return const0_rtx;
4422 }
4423
4424 /* Expand a call to one of the builtin functions __builtin_frame_address or
4425 __builtin_return_address. */
4426
4427 static rtx
4428 expand_builtin_frame_address (tree fndecl, tree exp)
4429 {
4430 /* The argument must be a nonnegative integer constant.
4431 It counts the number of frames to scan up the stack.
4432 The value is the return address saved in that frame. */
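/* Illustrative uses (hypothetical source, not from this file):

     void *ra = __builtin_return_address (0);   the caller's address
     void *fp = __builtin_frame_address (1);    one frame up, if the
                                                port can reach it  */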
4433 if (call_expr_nargs (exp) == 0)
4434 /* Warning about missing arg was already issued. */
4435 return const0_rtx;
4436 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4437 {
4438 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4439 error ("invalid argument to %<__builtin_frame_address%>");
4440 else
4441 error ("invalid argument to %<__builtin_return_address%>");
4442 return const0_rtx;
4443 }
4444 else
4445 {
4446 rtx tem
4447 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4448 tree_to_uhwi (CALL_EXPR_ARG (exp, 0)));
4449
4450 /* Some ports cannot access arbitrary stack frames. */
4451 if (tem == NULL)
4452 {
4453 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4454 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4455 else
4456 warning (0, "unsupported argument to %<__builtin_return_address%>");
4457 return const0_rtx;
4458 }
4459
4460 /* For __builtin_frame_address, return what we've got. */
4461 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4462 return tem;
4463
4464 if (!REG_P (tem)
4465 && ! CONSTANT_P (tem))
4466 tem = copy_addr_to_reg (tem);
4467 return tem;
4468 }
4469 }
4470
4471 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4472 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4473 is the same as for allocate_dynamic_stack_space. */
4474
4475 static rtx
4476 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4477 {
4478 rtx op0;
4479 rtx result;
4480 bool valid_arglist;
4481 unsigned int align;
4482 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4483 == BUILT_IN_ALLOCA_WITH_ALIGN);
4484
4485 valid_arglist
4486 = (alloca_with_align
4487 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4488 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4489
4490 if (!valid_arglist)
4491 return NULL_RTX;
4492
4493 /* Compute the argument. */
4494 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4495
4496 /* Compute the alignment. */
4497 align = (alloca_with_align
4498 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4499 : BIGGEST_ALIGNMENT);
4500
4501 /* Allocate the desired space. */
4502 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4503 result = convert_memory_address (ptr_mode, result);
4504
4505 return result;
4506 }
4507
4508 /* Expand a call to bswap builtin in EXP.
4509 Return NULL_RTX if a normal call should be emitted rather than expanding the
4510 function in-line. If convenient, the result should be placed in TARGET.
4511 SUBTARGET may be used as the target for computing one of EXP's operands. */
4512
4513 static rtx
4514 expand_builtin_bswap (enum machine_mode target_mode, tree exp, rtx target,
4515 rtx subtarget)
4516 {
4517 tree arg;
4518 rtx op0;
4519
4520 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4521 return NULL_RTX;
4522
4523 arg = CALL_EXPR_ARG (exp, 0);
4524 op0 = expand_expr (arg,
4525 subtarget && GET_MODE (subtarget) == target_mode
4526 ? subtarget : NULL_RTX,
4527 target_mode, EXPAND_NORMAL);
4528 if (GET_MODE (op0) != target_mode)
4529 op0 = convert_to_mode (target_mode, op0, 1);
4530
4531 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4532
4533 gcc_assert (target);
4534
4535 return convert_to_mode (target_mode, target, 1);
4536 }
4537
4538 /* Expand a call to a unary builtin in EXP.
4539 Return NULL_RTX if a normal call should be emitted rather than expanding the
4540 function in-line. If convenient, the result should be placed in TARGET.
4541 SUBTARGET may be used as the target for computing one of EXP's operands. */
4542
4543 static rtx
4544 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
4545 rtx subtarget, optab op_optab)
4546 {
4547 rtx op0;
4548
4549 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4550 return NULL_RTX;
4551
4552 /* Compute the argument. */
4553 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4554 (subtarget
4555 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4556 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4557 VOIDmode, EXPAND_NORMAL);
4558 /* Compute op, into TARGET if possible.
4559 Set TARGET to wherever the result comes back. */
4560 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4561 op_optab, op0, target, op_optab != clrsb_optab);
4562 gcc_assert (target);
4563
4564 return convert_to_mode (target_mode, target, 0);
4565 }
4566
4567 /* Expand a call to __builtin_expect. We just return our argument
4568 as the builtin_expect semantics should already have been applied by
4569 the tree branch prediction pass. */
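/* Illustrative source-level use (hypothetical): in

     if (__builtin_expect (ptr != NULL, 1))
       use (ptr);

   the hint was consumed when edge probabilities were computed, so the
   expansion below simply yields the first argument.  */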
4570
4571 static rtx
4572 expand_builtin_expect (tree exp, rtx target)
4573 {
4574 tree arg;
4575
4576 if (call_expr_nargs (exp) < 2)
4577 return const0_rtx;
4578 arg = CALL_EXPR_ARG (exp, 0);
4579
4580 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4581 /* When guessing was done, the hints should be already stripped away. */
4582 gcc_assert (!flag_guess_branch_prob
4583 || optimize == 0 || seen_error ());
4584 return target;
4585 }
4586
4587 /* Expand a call to __builtin_assume_aligned. We just return our first
4588 argument, as the builtin_assume_aligned semantics should already have
4589 been applied by CCP. */
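/* Illustrative source-level use (hypothetical): in

     double *ap = __builtin_assume_aligned (a, 32);

   CCP has already propagated the 32-byte alignment, so the expansion
   below just forwards the first argument.  */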
4590
4591 static rtx
4592 expand_builtin_assume_aligned (tree exp, rtx target)
4593 {
4594 if (call_expr_nargs (exp) < 2)
4595 return const0_rtx;
4596 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4597 EXPAND_NORMAL);
4598 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4599 && (call_expr_nargs (exp) < 3
4600 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4601 return target;
4602 }
4603
4604 void
4605 expand_builtin_trap (void)
4606 {
4607 #ifdef HAVE_trap
4608 if (HAVE_trap)
4609 {
4610 rtx insn = emit_insn (gen_trap ());
4611 /* For trap insns when not accumulating outgoing args force
4612 REG_ARGS_SIZE note to prevent crossjumping of calls with
4613 different args sizes. */
4614 if (!ACCUMULATE_OUTGOING_ARGS)
4615 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4616 }
4617 else
4618 #endif
4619 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4620 emit_barrier ();
4621 }
4622
4623 /* Expand a call to __builtin_unreachable. We do nothing except emit
4624 a barrier saying that control flow will not pass here.
4625
4626 It is the responsibility of the program being compiled to ensure
4627 that control flow never reaches __builtin_unreachable. */
4628 static void
4629 expand_builtin_unreachable (void)
4630 {
4631 emit_barrier ();
4632 }
4633
4634 /* Expand EXP, a call to fabs, fabsf or fabsl.
4635 Return NULL_RTX if a normal call should be emitted rather than expanding
4636 the function inline. If convenient, the result should be placed
4637 in TARGET. SUBTARGET may be used as the target for computing
4638 the operand. */
4639
4640 static rtx
4641 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4642 {
4643 enum machine_mode mode;
4644 tree arg;
4645 rtx op0;
4646
4647 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4648 return NULL_RTX;
4649
4650 arg = CALL_EXPR_ARG (exp, 0);
4651 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4652 mode = TYPE_MODE (TREE_TYPE (arg));
4653 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4654 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4655 }
4656
4657 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4658 Return NULL if a normal call should be emitted rather than expanding the
4659 function inline. If convenient, the result should be placed in TARGET.
4660 SUBTARGET may be used as the target for computing the operand. */
4661
4662 static rtx
4663 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4664 {
4665 rtx op0, op1;
4666 tree arg;
4667
4668 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4669 return NULL_RTX;
4670
4671 arg = CALL_EXPR_ARG (exp, 0);
4672 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4673
4674 arg = CALL_EXPR_ARG (exp, 1);
4675 op1 = expand_normal (arg);
4676
4677 return expand_copysign (op0, op1, target);
4678 }
4679
4680 /* Create a new constant string literal and return a char* pointer to it.
4681 The STRING_CST value is the LEN characters at STR. */
4682 tree
4683 build_string_literal (int len, const char *str)
4684 {
4685 tree t, elem, index, type;
4686
4687 t = build_string (len, str);
4688 elem = build_type_variant (char_type_node, 1, 0);
4689 index = build_index_type (size_int (len - 1));
4690 type = build_array_type (elem, index);
4691 TREE_TYPE (t) = type;
4692 TREE_CONSTANT (t) = 1;
4693 TREE_READONLY (t) = 1;
4694 TREE_STATIC (t) = 1;
4695
4696 type = build_pointer_type (elem);
4697 t = build1 (ADDR_EXPR, type,
4698 build4 (ARRAY_REF, elem,
4699 t, integer_zero_node, NULL_TREE, NULL_TREE));
4700 return t;
4701 }
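/* A minimal usage sketch (hypothetical caller): to build a pointer
   argument for an emitted call, one could write

     tree arg = build_string_literal (strlen ("hi") + 1, "hi");

   which yields &"hi"[0], a pointer to the constant string.  */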
4702
4703 /* Expand a call to __builtin___clear_cache. */
4704
4705 static rtx
4706 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
4707 {
4708 #ifndef HAVE_clear_cache
4709 #ifdef CLEAR_INSN_CACHE
4710 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4711 does something. Just do the default expansion to a call to
4712 __clear_cache(). */
4713 return NULL_RTX;
4714 #else
4715 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4716 does nothing. There is no need to call it. Do nothing. */
4717 return const0_rtx;
4718 #endif /* CLEAR_INSN_CACHE */
4719 #else
4720 /* We have a "clear_cache" insn, and it will handle everything. */
4721 tree begin, end;
4722 rtx begin_rtx, end_rtx;
4723
4724 /* We must not expand to a library call. If we did, any
4725 fallback library function in libgcc that might contain a call to
4726 __builtin___clear_cache() would recurse infinitely. */
4727 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4728 {
4729 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4730 return const0_rtx;
4731 }
4732
4733 if (HAVE_clear_cache)
4734 {
4735 struct expand_operand ops[2];
4736
4737 begin = CALL_EXPR_ARG (exp, 0);
4738 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4739
4740 end = CALL_EXPR_ARG (exp, 1);
4741 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4742
4743 create_address_operand (&ops[0], begin_rtx);
4744 create_address_operand (&ops[1], end_rtx);
4745 if (maybe_expand_insn (CODE_FOR_clear_cache, 2, ops))
4746 return const0_rtx;
4747 }
4748 return const0_rtx;
4749 #endif /* HAVE_clear_cache */
4750 }
4751
4752 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4753
4754 static rtx
4755 round_trampoline_addr (rtx tramp)
4756 {
4757 rtx temp, addend, mask;
4758
4759 /* If we don't need too much alignment, we'll have been guaranteed
4760 proper alignment by get_trampoline_type. */
4761 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4762 return tramp;
4763
4764 /* Round address up to desired boundary. */
4765 temp = gen_reg_rtx (Pmode);
4766 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4767 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4768
4769 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4770 temp, 0, OPTAB_LIB_WIDEN);
4771 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4772 temp, 0, OPTAB_LIB_WIDEN);
4773
4774 return tramp;
4775 }
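/* A worked instance of the rounding above (illustrative): with
   TRAMPOLINE_ALIGNMENT == 64 (8 bytes) and TRAMP == 0x1003,

     temp  = 0x1003 + 7  == 0x100a
     tramp = 0x100a & -8 == 0x1008

   the next 8-byte boundary at or above the original address.  */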
4776
4777 static rtx
4778 expand_builtin_init_trampoline (tree exp, bool onstack)
4779 {
4780 tree t_tramp, t_func, t_chain;
4781 rtx m_tramp, r_tramp, r_chain, tmp;
4782
4783 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4784 POINTER_TYPE, VOID_TYPE))
4785 return NULL_RTX;
4786
4787 t_tramp = CALL_EXPR_ARG (exp, 0);
4788 t_func = CALL_EXPR_ARG (exp, 1);
4789 t_chain = CALL_EXPR_ARG (exp, 2);
4790
4791 r_tramp = expand_normal (t_tramp);
4792 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4793 MEM_NOTRAP_P (m_tramp) = 1;
4794
4795 /* If ONSTACK, the TRAMP argument should be the address of a field
4796 within the local function's FRAME decl. Either way, let's see if
4797 we can fill in the MEM_ATTRs for this memory. */
4798 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4799 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
4800
4801 /* The creator of a heap trampoline is responsible for making sure the
4802 address is aligned to at least STACK_BOUNDARY. Normally malloc
4803 will ensure this anyhow. */
4804 tmp = round_trampoline_addr (r_tramp);
4805 if (tmp != r_tramp)
4806 {
4807 m_tramp = change_address (m_tramp, BLKmode, tmp);
4808 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4809 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4810 }
4811
4812 /* The FUNC argument should be the address of the nested function.
4813 Extract the actual function decl to pass to the hook. */
4814 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4815 t_func = TREE_OPERAND (t_func, 0);
4816 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4817
4818 r_chain = expand_normal (t_chain);
4819
4820 /* Generate insns to initialize the trampoline. */
4821 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4822
4823 if (onstack)
4824 {
4825 trampolines_created = 1;
4826
4827 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4828 "trampoline generated for nested function %qD", t_func);
4829 }
4830
4831 return const0_rtx;
4832 }
4833
4834 static rtx
4835 expand_builtin_adjust_trampoline (tree exp)
4836 {
4837 rtx tramp;
4838
4839 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4840 return NULL_RTX;
4841
4842 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4843 tramp = round_trampoline_addr (tramp);
4844 if (targetm.calls.trampoline_adjust_address)
4845 tramp = targetm.calls.trampoline_adjust_address (tramp);
4846
4847 return tramp;
4848 }
4849
4850 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4851 function. The function first checks whether the back end provides
4852 an insn to implement signbit for the respective mode. If not, it
4853 checks whether the floating point format of the value is such that
4854 the sign bit can be extracted. If that is not the case, the
4855 function returns NULL_RTX to indicate that a normal call should be
4856 emitted rather than expanding the function in-line. EXP is the
4857 expression that is a call to the builtin function; if convenient,
4858 the result should be placed in TARGET. */
4859 static rtx
4860 expand_builtin_signbit (tree exp, rtx target)
4861 {
4862 const struct real_format *fmt;
4863 enum machine_mode fmode, imode, rmode;
4864 tree arg;
4865 int word, bitpos;
4866 enum insn_code icode;
4867 rtx temp;
4868 location_t loc = EXPR_LOCATION (exp);
4869
4870 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4871 return NULL_RTX;
4872
4873 arg = CALL_EXPR_ARG (exp, 0);
4874 fmode = TYPE_MODE (TREE_TYPE (arg));
4875 rmode = TYPE_MODE (TREE_TYPE (exp));
4876 fmt = REAL_MODE_FORMAT (fmode);
4877
4878 arg = builtin_save_expr (arg);
4879
4880 /* Expand the argument, yielding an RTX expression. */
4881 temp = expand_normal (arg);
4882
4883 /* Check if the back end provides an insn that handles signbit for the
4884 argument's mode. */
4885 icode = optab_handler (signbit_optab, fmode);
4886 if (icode != CODE_FOR_nothing)
4887 {
4888 rtx last = get_last_insn ();
4889 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4890 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4891 return target;
4892 delete_insns_since (last);
4893 }
4894
4895 /* For floating point formats without a sign bit, implement signbit
4896 as "ARG < 0.0". */
4897 bitpos = fmt->signbit_ro;
4898 if (bitpos < 0)
4899 {
4900 /* But we can't do this if the format supports signed zero. */
4901 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
4902 return NULL_RTX;
4903
4904 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
4905 build_real (TREE_TYPE (arg), dconst0));
4906 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4907 }
4908
4909 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
4910 {
4911 imode = int_mode_for_mode (fmode);
4912 if (imode == BLKmode)
4913 return NULL_RTX;
4914 temp = gen_lowpart (imode, temp);
4915 }
4916 else
4917 {
4918 imode = word_mode;
4919 /* Handle targets with different FP word orders. */
4920 if (FLOAT_WORDS_BIG_ENDIAN)
4921 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
4922 else
4923 word = bitpos / BITS_PER_WORD;
4924 temp = operand_subword_force (temp, word, fmode);
4925 bitpos = bitpos % BITS_PER_WORD;
4926 }
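/* Worked example (illustrative): for IEEE double (64-bit FMODE,
   signbit_ro == 63) on a target with 32-bit words:

     word   = 63 / 32 == 1   (or (64 - 63) / 32 == 0 when float
                              words are big-endian)
     bitpos = 63 % 32 == 31

   so the sign bit lives in bit 31 of the selected word.  */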
4927
4928 /* Force the intermediate word_mode (or narrower) result into a
4929 register. This avoids attempting to create paradoxical SUBREGs
4930 of floating point modes below. */
4931 temp = force_reg (imode, temp);
4932
4933 /* If the bitpos is within the "result mode" lowpart, the operation
4934 can be implemented with a single bitwise AND. Otherwise, we need
4935 a right shift and an AND. */
4936
4937 if (bitpos < GET_MODE_BITSIZE (rmode))
4938 {
4939 double_int mask = double_int_zero.set_bit (bitpos);
4940
4941 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
4942 temp = gen_lowpart (rmode, temp);
4943 temp = expand_binop (rmode, and_optab, temp,
4944 immed_double_int_const (mask, rmode),
4945 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4946 }
4947 else
4948 {
4949 /* Perform a logical right shift to place the signbit in the least
4950 significant bit, then truncate the result to the desired mode
4951 and mask just this bit. */
4952 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
4953 temp = gen_lowpart (rmode, temp);
4954 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
4955 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4956 }
4957
4958 return temp;
4959 }
4960
4961 /* Expand fork or exec calls. TARGET is the desired target of the
4962 call. EXP is the call. FN is the
4963 identifier of the actual function. IGNORE is nonzero if the
4964 value is to be ignored. */
4965
4966 static rtx
4967 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
4968 {
4969 tree id, decl;
4970 tree call;
4971
4972 /* If we are not profiling, just call the function. */
4973 if (!profile_arc_flag)
4974 return NULL_RTX;
4975
4976 /* Otherwise call the wrapper. This should be equivalent for the rest of
4977 the compiler, so the code does not diverge, and the wrapper may run the
4978 code necessary for keeping the profiling sane. */
4979
4980 switch (DECL_FUNCTION_CODE (fn))
4981 {
4982 case BUILT_IN_FORK:
4983 id = get_identifier ("__gcov_fork");
4984 break;
4985
4986 case BUILT_IN_EXECL:
4987 id = get_identifier ("__gcov_execl");
4988 break;
4989
4990 case BUILT_IN_EXECV:
4991 id = get_identifier ("__gcov_execv");
4992 break;
4993
4994 case BUILT_IN_EXECLP:
4995 id = get_identifier ("__gcov_execlp");
4996 break;
4997
4998 case BUILT_IN_EXECLE:
4999 id = get_identifier ("__gcov_execle");
5000 break;
5001
5002 case BUILT_IN_EXECVP:
5003 id = get_identifier ("__gcov_execvp");
5004 break;
5005
5006 case BUILT_IN_EXECVE:
5007 id = get_identifier ("__gcov_execve");
5008 break;
5009
5010 default:
5011 gcc_unreachable ();
5012 }
5013
5014 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5015 FUNCTION_DECL, id, TREE_TYPE (fn));
5016 DECL_EXTERNAL (decl) = 1;
5017 TREE_PUBLIC (decl) = 1;
5018 DECL_ARTIFICIAL (decl) = 1;
5019 TREE_NOTHROW (decl) = 1;
5020 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5021 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5022 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5023 return expand_call (call, target, ignore);
5024 }
5025
5026
5027 \f
5028 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5029 the pointer in these functions is void*, the tree optimizers may remove
5030 casts. The mode computed in expand_builtin isn't reliable either, due
5031 to __sync_bool_compare_and_swap.
5032
5033 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5034 group of builtins. This gives us log2 of the mode size. */
5035
5036 static inline enum machine_mode
5037 get_builtin_sync_mode (int fcode_diff)
5038 {
5039 /* The size is not negotiable, so ask not to get BLKmode in return
5040 if the target indicates that a smaller size would be better. */
5041 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5042 }
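/* For example (illustrative): FCODE_DIFF values 0..4 request modes of
   8 << 0 .. 8 << 4 bits, i.e. the QImode, HImode, SImode, DImode and
   TImode integer modes for the _1 .. _16 builtin variants.  */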
5043
5044 /* Expand the memory expression LOC and return the appropriate memory operand
5045 for the builtin_sync operations. */
5046
5047 static rtx
5048 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5049 {
5050 rtx addr, mem;
5051
5052 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5053 addr = convert_memory_address (Pmode, addr);
5054
5055 /* Note that we explicitly do not want any alias information for this
5056 memory, so that we kill all other live memories. Otherwise we don't
5057 satisfy the full barrier semantics of the intrinsic. */
5058 mem = validize_mem (gen_rtx_MEM (mode, addr));
5059
5060 /* The alignment needs to be at least that of the mode.  */
5061 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5062 get_pointer_alignment (loc)));
5063 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5064 MEM_VOLATILE_P (mem) = 1;
5065
5066 return mem;
5067 }
5068
5069 /* Make sure an argument is in the right mode.
5070 EXP is the tree argument.
5071 MODE is the mode it should be in. */
5072
5073 static rtx
5074 expand_expr_force_mode (tree exp, enum machine_mode mode)
5075 {
5076 rtx val;
5077 enum machine_mode old_mode;
5078
5079 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5080 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5081 of CONST_INTs, where we know the old_mode only from the call argument. */
5082
5083 old_mode = GET_MODE (val);
5084 if (old_mode == VOIDmode)
5085 old_mode = TYPE_MODE (TREE_TYPE (exp));
5086 val = convert_modes (mode, old_mode, val, 1);
5087 return val;
5088 }
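
/* E.g. for __sync_fetch_and_add (&c, 1) the constant 1 expands to a
   VOIDmode CONST_INT, so its original mode must be recovered from the
   type of the call argument before convert_modes is applied.  */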
5089
5090
5091 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5092 EXP is the CALL_EXPR. CODE is the rtx code
5093 that corresponds to the arithmetic or logical operation from the name;
5094 an exception here is that NOT actually means NAND. TARGET is an optional
5095 place for us to store the results; AFTER is true if this is the
5096 fetch_and_xxx form. */
5097
5098 static rtx
5099 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5100 enum rtx_code code, bool after,
5101 rtx target)
5102 {
5103 rtx val, mem;
5104 location_t loc = EXPR_LOCATION (exp);
5105
5106 if (code == NOT && warn_sync_nand)
5107 {
5108 tree fndecl = get_callee_fndecl (exp);
5109 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5110
5111 static bool warned_f_a_n, warned_n_a_f;
5112
5113 switch (fcode)
5114 {
5115 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5116 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5117 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5118 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5119 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5120 if (warned_f_a_n)
5121 break;
5122
5123 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5124 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5125 warned_f_a_n = true;
5126 break;
5127
5128 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5129 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5130 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5131 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5132 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5133 if (warned_n_a_f)
5134 break;
5135
5136 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5137 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5138 warned_n_a_f = true;
5139 break;
5140
5141 default:
5142 gcc_unreachable ();
5143 }
5144 }
5145
5146 /* Expand the operands. */
5147 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5148 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5149
5150 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SEQ_CST,
5151 after);
5152 }
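
/* Usage sketch (caller-side C; c and m are ints): with AFTER false,
   __sync_fetch_and_add (&c, 1) returns the old value of c, while with
   AFTER true, __sync_add_and_fetch (&c, 1) returns the new value.  Since
   GCC 4.4, __sync_fetch_and_nand (&c, m) atomically performs
   c = ~(c & m), which is why CODE == NOT above really means NAND.  */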
5153
5154 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5155 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5156 true if this is the boolean form. TARGET is a place for us to store the
5157 results; this is NOT optional if IS_BOOL is true. */
5158
5159 static rtx
5160 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5161 bool is_bool, rtx target)
5162 {
5163 rtx old_val, new_val, mem;
5164 rtx *pbool, *poval;
5165
5166 /* Expand the operands. */
5167 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5168 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5169 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5170
5171 pbool = poval = NULL;
5172 if (target != const0_rtx)
5173 {
5174 if (is_bool)
5175 pbool = &target;
5176 else
5177 poval = &target;
5178 }
5179 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5180 false, MEMMODEL_SEQ_CST,
5181 MEMMODEL_SEQ_CST))
5182 return NULL_RTX;
5183
5184 return target;
5185 }
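
/* Usage sketch (caller-side C; v is an int): the value form
   __sync_val_compare_and_swap (&v, expected, desired) returns the prior
   contents of v, while the boolean form
   __sync_bool_compare_and_swap (&v, expected, desired) returns nonzero
   iff DESIRED was stored; both compare against EXPECTED under full
   MEMMODEL_SEQ_CST semantics, as the call above shows.  */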
5186
5187 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5188 general form is actually an atomic exchange, and some targets only
5189 support a reduced form with the second argument being a constant 1.
5190 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5191 the results. */
5192
5193 static rtx
5194 expand_builtin_sync_lock_test_and_set (enum machine_mode mode, tree exp,
5195 rtx target)
5196 {
5197 rtx val, mem;
5198
5199 /* Expand the operands. */
5200 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5201 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5202
5203 return expand_sync_lock_test_and_set (target, mem, val);
5204 }
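
/* Usage sketch: a minimal spinlock acquire is

     while (__sync_lock_test_and_set (&lock, 1))
       continue;

   where lock is an int; targets with only the reduced form accept just
   the constant 1 here, while the general form is a full exchange.  */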
5205
5206 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5207
5208 static void
5209 expand_builtin_sync_lock_release (enum machine_mode mode, tree exp)
5210 {
5211 rtx mem;
5212
5213 /* Expand the operands. */
5214 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5215
5216 expand_atomic_store (mem, const0_rtx, MEMMODEL_RELEASE, true);
5217 }
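
/* Usage sketch: __sync_lock_release (&lock) pairs with the test-and-set
   acquire above; as the code shows, it is just an atomic store of 0
   with MEMMODEL_RELEASE ordering.  */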
5218
5219 /* Given an integer representing an ``enum memmodel'', verify its
5220 correctness and return the memory model enum. */
5221
5222 static enum memmodel
5223 get_memmodel (tree exp)
5224 {
5225 rtx op;
5226 unsigned HOST_WIDE_INT val;
5227
5228 /* If the parameter is not a constant, it's a run time value so we'll just
5229 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5230 if (TREE_CODE (exp) != INTEGER_CST)
5231 return MEMMODEL_SEQ_CST;
5232
5233 op = expand_normal (exp);
5234
5235 val = INTVAL (op);
5236 if (targetm.memmodel_check)
5237 val = targetm.memmodel_check (val);
5238 else if (val & ~MEMMODEL_MASK)
5239 {
5240 warning (OPT_Winvalid_memory_model,
5241 "Unknown architecture specifier in memory model to builtin.");
5242 return MEMMODEL_SEQ_CST;
5243 }
5244
5245 if ((INTVAL (op) & MEMMODEL_MASK) >= MEMMODEL_LAST)
5246 {
5247 warning (OPT_Winvalid_memory_model,
5248 "invalid memory model argument to builtin");
5249 return MEMMODEL_SEQ_CST;
5250 }
5251
5252 return (enum memmodel) val;
5253 }
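
/* For reference, the language-level macros map onto the low bits checked
   above as __ATOMIC_RELAXED == 0, __ATOMIC_CONSUME == 1,
   __ATOMIC_ACQUIRE == 2, __ATOMIC_RELEASE == 3, __ATOMIC_ACQ_REL == 4
   and __ATOMIC_SEQ_CST == 5; bits outside MEMMODEL_MASK are reserved for
   target extensions vetted by targetm.memmodel_check.  */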
5254
5255 /* Expand the __atomic_exchange intrinsic:
5256 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5257 EXP is the CALL_EXPR.
5258 TARGET is an optional place for us to store the results. */
5259
5260 static rtx
5261 expand_builtin_atomic_exchange (enum machine_mode mode, tree exp, rtx target)
5262 {
5263 rtx val, mem;
5264 enum memmodel model;
5265
5266 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5267 if ((model & MEMMODEL_MASK) == MEMMODEL_CONSUME)
5268 {
5269 error ("invalid memory model for %<__atomic_exchange%>");
5270 return NULL_RTX;
5271 }
5272
5273 if (!flag_inline_atomics)
5274 return NULL_RTX;
5275
5276 /* Expand the operands. */
5277 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5278 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5279
5280 return expand_atomic_exchange (target, mem, val, model);
5281 }
5282
5283 /* Expand the __atomic_compare_exchange intrinsic:
5284 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5285 TYPE desired, BOOL weak,
5286 enum memmodel success,
5287 enum memmodel failure)
5288 EXP is the CALL_EXPR.
5289 TARGET is an optional place for us to store the results. */
5290
5291 static rtx
5292 expand_builtin_atomic_compare_exchange (enum machine_mode mode, tree exp,
5293 rtx target)
5294 {
5295 rtx expect, desired, mem, oldval, label;
5296 enum memmodel success, failure;
5297 tree weak;
5298 bool is_weak;
5299
5300 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5301 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5302
5303 if ((failure & MEMMODEL_MASK) == MEMMODEL_RELEASE
5304 || (failure & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5305 {
5306 error ("invalid failure memory model for %<__atomic_compare_exchange%>");
5307 return NULL_RTX;
5308 }
5309
5310 if (failure > success)
5311 {
5312 error ("failure memory model cannot be stronger than success "
5313 "memory model for %<__atomic_compare_exchange%>");
5314 return NULL_RTX;
5315 }
5316
5317 if (!flag_inline_atomics)
5318 return NULL_RTX;
5319
5320 /* Expand the operands. */
5321 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5322
5323 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5324 expect = convert_memory_address (Pmode, expect);
5325 expect = gen_rtx_MEM (mode, expect);
5326 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5327
5328 weak = CALL_EXPR_ARG (exp, 3);
5329 is_weak = false;
5330 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5331 is_weak = true;
5332
5333 if (target == const0_rtx)
5334 target = NULL;
5335
5336 /* Lest the rtl backend create a race condition with an improper store
5337 to memory, always create a new pseudo for OLDVAL. */
5338 oldval = NULL;
5339
5340 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5341 is_weak, success, failure))
5342 return NULL_RTX;
5343
5344 /* Conditionally store back to EXPECT, lest we create a race condition
5345 with an improper store to memory. */
5346 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5347 the normal case where EXPECT is totally private, i.e. a register. At
5348 which point the store can be unconditional. */
5349 label = gen_label_rtx ();
5350 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL, VOIDmode, 1, label);
5351 emit_move_insn (expect, oldval);
5352 emit_label (label);
5353
5354 return target;
5355 }
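
/* Usage sketch (caller-side C; v is an int): a typical CAS loop is

     int expected = __atomic_load_n (&v, __ATOMIC_RELAXED);
     while (!__atomic_compare_exchange_n (&v, &expected, expected + 1, 1,
                                          __ATOMIC_SEQ_CST,
                                          __ATOMIC_RELAXED))
       ;

   on failure the builtin writes the current value of v back into
   expected, which is exactly the conditional store-back emitted above.  */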
5356
5357 /* Expand the __atomic_load intrinsic:
5358 TYPE __atomic_load (TYPE *object, enum memmodel)
5359 EXP is the CALL_EXPR.
5360 TARGET is an optional place for us to store the results. */
5361
5362 static rtx
5363 expand_builtin_atomic_load (enum machine_mode mode, tree exp, rtx target)
5364 {
5365 rtx mem;
5366 enum memmodel model;
5367
5368 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5369 if ((model & MEMMODEL_MASK) == MEMMODEL_RELEASE
5370 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5371 {
5372 error ("invalid memory model for %<__atomic_load%>");
5373 return NULL_RTX;
5374 }
5375
5376 if (!flag_inline_atomics)
5377 return NULL_RTX;
5378
5379 /* Expand the operand. */
5380 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5381
5382 return expand_atomic_load (target, mem, model);
5383 }
5384
5385
5386 /* Expand the __atomic_store intrinsic:
5387 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5388 EXP is the CALL_EXPR.
5389 TARGET is an optional place for us to store the results. */
5390
5391 static rtx
5392 expand_builtin_atomic_store (enum machine_mode mode, tree exp)
5393 {
5394 rtx mem, val;
5395 enum memmodel model;
5396
5397 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5398 if ((model & MEMMODEL_MASK) != MEMMODEL_RELAXED
5399 && (model & MEMMODEL_MASK) != MEMMODEL_SEQ_CST
5400 && (model & MEMMODEL_MASK) != MEMMODEL_RELEASE)
5401 {
5402 error ("invalid memory model for %<__atomic_store%>");
5403 return NULL_RTX;
5404 }
5405
5406 if (!flag_inline_atomics)
5407 return NULL_RTX;
5408
5409 /* Expand the operands. */
5410 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5411 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5412
5413 return expand_atomic_store (mem, val, model, false);
5414 }
5415
5416 /* Expand the __atomic_fetch_XXX intrinsic:
5417 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5418 EXP is the CALL_EXPR.
5419 TARGET is an optional place for us to store the results.
5420 CODE is the operation, PLUS, MINUS, AND, XOR, IOR, or NOT (meaning NAND).
5421 FETCH_AFTER is true if returning the result of the operation.
5422 FETCH_AFTER is false if returning the value before the operation.
5423 IGNORE is true if the result is not used.
5424 EXT_CALL is the correct builtin for an external call if this cannot be
5425 resolved to an instruction sequence. */
5426
5427 static rtx
5428 expand_builtin_atomic_fetch_op (enum machine_mode mode, tree exp, rtx target,
5429 enum rtx_code code, bool fetch_after,
5430 bool ignore, enum built_in_function ext_call)
5431 {
5432 rtx val, mem, ret;
5433 enum memmodel model;
5434 tree fndecl;
5435 tree addr;
5436
5437 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5438
5439 /* Expand the operands. */
5440 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5441 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5442
5443 /* Only try generating instructions if inlining is turned on. */
5444 if (flag_inline_atomics)
5445 {
5446 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5447 if (ret)
5448 return ret;
5449 }
5450
5451 /* Return if a different routine isn't needed for the library call. */
5452 if (ext_call == BUILT_IN_NONE)
5453 return NULL_RTX;
5454
5455 /* Change the call to the specified function. */
5456 fndecl = get_callee_fndecl (exp);
5457 addr = CALL_EXPR_FN (exp);
5458 STRIP_NOPS (addr);
5459
5460 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5461 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
5462
5463 /* Expand the call here so we can emit trailing code. */
5464 ret = expand_call (exp, target, ignore);
5465
5466 /* Replace the original function just in case it matters. */
5467 TREE_OPERAND (addr, 0) = fndecl;
5468
5469 /* Then issue the arithmetic correction to return the right result. */
5470 if (!ignore)
5471 {
5472 if (code == NOT)
5473 {
5474 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5475 OPTAB_LIB_WIDEN);
5476 ret = expand_simple_unop (mode, NOT, ret, target, true);
5477 }
5478 else
5479 ret = expand_simple_binop (mode, code, ret, val, target, true,
5480 OPTAB_LIB_WIDEN);
5481 }
5482 return ret;
5483 }
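
/* E.g. if __atomic_add_fetch_4 cannot be inlined, the code above calls
   the external __atomic_fetch_add_4 (the EXT_CALL) and then adds VAL to
   the returned old value to recover the new value; for NAND the
   correction is ret = ~(ret & val), matching the GCC 4.4 semantics.  */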
5484
5485
5486 #ifndef HAVE_atomic_clear
5487 # define HAVE_atomic_clear 0
5488 # define gen_atomic_clear(x,y) (gcc_unreachable (), NULL_RTX)
5489 #endif
5490
5491 /* Expand an atomic clear operation.
5492 void __atomic_clear (BOOL *obj, enum memmodel)
5493 EXP is the call expression. */
5494
5495 static rtx
5496 expand_builtin_atomic_clear (tree exp)
5497 {
5498 enum machine_mode mode;
5499 rtx mem, ret;
5500 enum memmodel model;
5501
5502 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5503 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5504 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5505
5506 if ((model & MEMMODEL_MASK) == MEMMODEL_ACQUIRE
5507 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5508 {
5509 error ("invalid memory model for %<__atomic_clear%>");
5510 return const0_rtx;
5511 }
5512
5513 if (HAVE_atomic_clear)
5514 {
5515 emit_insn (gen_atomic_clear (mem, model));
5516 return const0_rtx;
5517 }
5518
5519 /* Try issuing an __atomic_store, allowing a fallback to
5520 __sync_lock_release.  The only way this can fail is if the bool type
5521 is larger than a word size.  Unlikely, but handle it anyway for
5522 completeness, assuming a single-threaded model, since there is no
5523 atomic support in that case and hence no barriers are required.  */
5524 ret = expand_atomic_store (mem, const0_rtx, model, true);
5525 if (!ret)
5526 emit_move_insn (mem, const0_rtx);
5527 return const0_rtx;
5528 }
5529
5530 /* Expand an atomic test_and_set operation.
5531 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
5532 EXP is the call expression. */
5533
5534 static rtx
5535 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5536 {
5537 rtx mem;
5538 enum memmodel model;
5539 enum machine_mode mode;
5540
5541 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5542 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5543 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5544
5545 return expand_atomic_test_and_set (target, mem, model);
5546 }
5547
5548
5549 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5550 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5551
5552 static tree
5553 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5554 {
5555 int size;
5556 enum machine_mode mode;
5557 unsigned int mode_align, type_align;
5558
5559 if (TREE_CODE (arg0) != INTEGER_CST)
5560 return NULL_TREE;
5561
5562 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5563 mode = mode_for_size (size, MODE_INT, 0);
5564 mode_align = GET_MODE_ALIGNMENT (mode);
5565
5566 if (TREE_CODE (arg1) == INTEGER_CST && INTVAL (expand_normal (arg1)) == 0)
5567 type_align = mode_align;
5568 else
5569 {
5570 tree ttype = TREE_TYPE (arg1);
5571
5572 /* This function is usually invoked and folded immediately by the front
5573 end before anything else has a chance to look at it. The pointer
5574 parameter at this point is usually cast to a void *, so check for that
5575 and look past the cast. */
5576 if (TREE_CODE (arg1) == NOP_EXPR && POINTER_TYPE_P (ttype)
5577 && VOID_TYPE_P (TREE_TYPE (ttype)))
5578 arg1 = TREE_OPERAND (arg1, 0);
5579
5580 ttype = TREE_TYPE (arg1);
5581 gcc_assert (POINTER_TYPE_P (ttype));
5582
5583 /* Get the underlying type of the object. */
5584 ttype = TREE_TYPE (ttype);
5585 type_align = TYPE_ALIGN (ttype);
5586 }
5587
5588 /* If the object has smaller alignment, the lock free routines cannot
5589 be used. */
5590 if (type_align < mode_align)
5591 return boolean_false_node;
5592
5593 /* Check if a compare_and_swap pattern exists for the mode which represents
5594 the required size. The pattern is not allowed to fail, so the existence
5595 of the pattern indicates support is present. */
5596 if (can_compare_and_swap_p (mode, true))
5597 return boolean_true_node;
5598 else
5599 return boolean_false_node;
5600 }
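
/* E.g. __atomic_always_lock_free (4, 0) folds to true exactly when the
   target has a never-failing compare-and-swap pattern for the 32-bit
   integer mode; passing a real pointer instead of 0 additionally
   requires the pointed-to type to be at least as aligned as that mode.  */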
5601
5602 /* Return true if the parameters to call EXP represent an object which will
5603 always generate lock free instructions. The first argument represents the
5604 size of the object, and the second parameter is a pointer to the object
5605 itself. If NULL is passed for the object, then the result is based on
5606 typical alignment for an object of the specified size. Otherwise return
5607 false. */
5608
5609 static rtx
5610 expand_builtin_atomic_always_lock_free (tree exp)
5611 {
5612 tree size;
5613 tree arg0 = CALL_EXPR_ARG (exp, 0);
5614 tree arg1 = CALL_EXPR_ARG (exp, 1);
5615
5616 if (TREE_CODE (arg0) != INTEGER_CST)
5617 {
5618 error ("non-constant argument 1 to __atomic_always_lock_free");
5619 return const0_rtx;
5620 }
5621
5622 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5623 if (size == boolean_true_node)
5624 return const1_rtx;
5625 return const0_rtx;
5626 }
5627
5628 /* Return boolean_true_node if it can be determined that object ARG1 of
5629 size ARG0 is lock free on this architecture; otherwise NULL_TREE.  */
5630
5631 static tree
5632 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5633 {
5634 if (!flag_inline_atomics)
5635 return NULL_TREE;
5636
5637 /* If it isn't always lock free, don't generate a result. */
5638 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5639 return boolean_true_node;
5640
5641 return NULL_TREE;
5642 }
5643
5644 /* Return true if the parameters to call EXP represent an object which will
5645 be lock free on this architecture.  The first argument represents the
5646 size of the object, and the second parameter is a pointer to the object
5647 itself.  If NULL is passed for the object, then the result is based on
5648 typical alignment for an object of the specified size.  Otherwise return
5649 NULL.  */
5650
5651 static rtx
5652 expand_builtin_atomic_is_lock_free (tree exp)
5653 {
5654 tree size;
5655 tree arg0 = CALL_EXPR_ARG (exp, 0);
5656 tree arg1 = CALL_EXPR_ARG (exp, 1);
5657
5658 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5659 {
5660 error ("non-integer argument 1 to __atomic_is_lock_free");
5661 return NULL_RTX;
5662 }
5663
5664 if (!flag_inline_atomics)
5665 return NULL_RTX;
5666
5667 /* If the value is known at compile time, return the RTX for it. */
5668 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5669 if (size == boolean_true_node)
5670 return const1_rtx;
5671
5672 return NULL_RTX;
5673 }
5674
5675 /* Expand the __atomic_thread_fence intrinsic:
5676 void __atomic_thread_fence (enum memmodel)
5677 EXP is the CALL_EXPR. */
5678
5679 static void
5680 expand_builtin_atomic_thread_fence (tree exp)
5681 {
5682 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5683 expand_mem_thread_fence (model);
5684 }
5685
5686 /* Expand the __atomic_signal_fence intrinsic:
5687 void __atomic_signal_fence (enum memmodel)
5688 EXP is the CALL_EXPR. */
5689
5690 static void
5691 expand_builtin_atomic_signal_fence (tree exp)
5692 {
5693 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5694 expand_mem_signal_fence (model);
5695 }
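
/* Usage sketch: __atomic_thread_fence (__ATOMIC_SEQ_CST) emits a full
   inter-thread barrier, while __atomic_signal_fence (__ATOMIC_SEQ_CST)
   only constrains compiler reordering against a signal handler on the
   same thread and typically emits no machine barrier.  */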
5696
5697 /* Expand the __sync_synchronize intrinsic. */
5698
5699 static void
5700 expand_builtin_sync_synchronize (void)
5701 {
5702 expand_mem_thread_fence (MEMMODEL_SEQ_CST);
5703 }
5704
5705 static rtx
5706 expand_builtin_thread_pointer (tree exp, rtx target)
5707 {
5708 enum insn_code icode;
5709 if (!validate_arglist (exp, VOID_TYPE))
5710 return const0_rtx;
5711 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5712 if (icode != CODE_FOR_nothing)
5713 {
5714 struct expand_operand op;
5715 /* If the target is not suitable, create a new target.  */
5716 if (target == NULL_RTX
5717 || !REG_P (target)
5718 || GET_MODE (target) != Pmode)
5719 target = gen_reg_rtx (Pmode);
5720 create_output_operand (&op, target, Pmode);
5721 expand_insn (icode, 1, &op);
5722 return target;
5723 }
5724 error ("__builtin_thread_pointer is not supported on this target");
5725 return const0_rtx;
5726 }
5727
5728 static void
5729 expand_builtin_set_thread_pointer (tree exp)
5730 {
5731 enum insn_code icode;
5732 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5733 return;
5734 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5735 if (icode != CODE_FOR_nothing)
5736 {
5737 struct expand_operand op;
5738 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5739 Pmode, EXPAND_NORMAL);
5740 create_input_operand (&op, val, Pmode);
5741 expand_insn (icode, 1, &op);
5742 return;
5743 }
5744 error ("__builtin_set_thread_pointer is not supported on this target");
5745 }
5746
5747 \f
5748 /* Emit code to restore the current value of stack. */
5749
5750 static void
5751 expand_stack_restore (tree var)
5752 {
5753 rtx prev, sa = expand_normal (var);
5754
5755 sa = convert_memory_address (Pmode, sa);
5756
5757 prev = get_last_insn ();
5758 emit_stack_restore (SAVE_BLOCK, sa);
5759 fixup_args_size_notes (prev, get_last_insn (), 0);
5760 }
5761
5762
5763 /* Emit code to save the current value of stack. */
5764
5765 static rtx
5766 expand_stack_save (void)
5767 {
5768 rtx ret = NULL_RTX;
5769
5770 do_pending_stack_adjust ();
5771 emit_stack_save (SAVE_BLOCK, &ret);
5772 return ret;
5773 }
5774
5775 /* Expand an expression EXP that calls a built-in function,
5776 with result going to TARGET if that's convenient
5777 (and in mode MODE if that's convenient).
5778 SUBTARGET may be used as the target for computing one of EXP's operands.
5779 IGNORE is nonzero if the value is to be ignored. */
5780
5781 rtx
5782 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5783 int ignore)
5784 {
5785 tree fndecl = get_callee_fndecl (exp);
5786 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5787 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5788 int flags;
5789
5790 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5791 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5792
5793 /* When not optimizing, generate calls to library functions for a certain
5794 set of builtins. */
5795 if (!optimize
5796 && !called_as_built_in (fndecl)
5797 && fcode != BUILT_IN_FORK
5798 && fcode != BUILT_IN_EXECL
5799 && fcode != BUILT_IN_EXECV
5800 && fcode != BUILT_IN_EXECLP
5801 && fcode != BUILT_IN_EXECLE
5802 && fcode != BUILT_IN_EXECVP
5803 && fcode != BUILT_IN_EXECVE
5804 && fcode != BUILT_IN_ALLOCA
5805 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5806 && fcode != BUILT_IN_FREE)
5807 return expand_call (exp, target, ignore);
5808
5809 /* The built-in function expanders test for target == const0_rtx
5810 to determine whether the function's result will be ignored. */
5811 if (ignore)
5812 target = const0_rtx;
5813
5814 /* If the result of a pure or const built-in function is ignored, and
5815 none of its arguments are volatile, we can avoid expanding the
5816 built-in call and just evaluate the arguments for side-effects. */
5817 if (target == const0_rtx
5818 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5819 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5820 {
5821 bool volatilep = false;
5822 tree arg;
5823 call_expr_arg_iterator iter;
5824
5825 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5826 if (TREE_THIS_VOLATILE (arg))
5827 {
5828 volatilep = true;
5829 break;
5830 }
5831
5832 if (! volatilep)
5833 {
5834 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5835 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5836 return const0_rtx;
5837 }
5838 }
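
  /* E.g. (void) __builtin_strlen (f ()) arrives here with
     target == const0_rtx; strlen is pure, so only the call to f (a
     hypothetical function with side effects) is expanded.  */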
5839
5840 switch (fcode)
5841 {
5842 CASE_FLT_FN (BUILT_IN_FABS):
5843 case BUILT_IN_FABSD32:
5844 case BUILT_IN_FABSD64:
5845 case BUILT_IN_FABSD128:
5846 target = expand_builtin_fabs (exp, target, subtarget);
5847 if (target)
5848 return target;
5849 break;
5850
5851 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5852 target = expand_builtin_copysign (exp, target, subtarget);
5853 if (target)
5854 return target;
5855 break;
5856
5857 /* Just do a normal library call if we were unable to fold
5858 the values. */
5859 CASE_FLT_FN (BUILT_IN_CABS):
5860 break;
5861
5862 CASE_FLT_FN (BUILT_IN_EXP):
5863 CASE_FLT_FN (BUILT_IN_EXP10):
5864 CASE_FLT_FN (BUILT_IN_POW10):
5865 CASE_FLT_FN (BUILT_IN_EXP2):
5866 CASE_FLT_FN (BUILT_IN_EXPM1):
5867 CASE_FLT_FN (BUILT_IN_LOGB):
5868 CASE_FLT_FN (BUILT_IN_LOG):
5869 CASE_FLT_FN (BUILT_IN_LOG10):
5870 CASE_FLT_FN (BUILT_IN_LOG2):
5871 CASE_FLT_FN (BUILT_IN_LOG1P):
5872 CASE_FLT_FN (BUILT_IN_TAN):
5873 CASE_FLT_FN (BUILT_IN_ASIN):
5874 CASE_FLT_FN (BUILT_IN_ACOS):
5875 CASE_FLT_FN (BUILT_IN_ATAN):
5876 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5877 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5878 because of possible accuracy problems. */
5879 if (! flag_unsafe_math_optimizations)
5880 break;
5881 CASE_FLT_FN (BUILT_IN_SQRT):
5882 CASE_FLT_FN (BUILT_IN_FLOOR):
5883 CASE_FLT_FN (BUILT_IN_CEIL):
5884 CASE_FLT_FN (BUILT_IN_TRUNC):
5885 CASE_FLT_FN (BUILT_IN_ROUND):
5886 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5887 CASE_FLT_FN (BUILT_IN_RINT):
5888 target = expand_builtin_mathfn (exp, target, subtarget);
5889 if (target)
5890 return target;
5891 break;
5892
5893 CASE_FLT_FN (BUILT_IN_FMA):
5894 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
5895 if (target)
5896 return target;
5897 break;
5898
5899 CASE_FLT_FN (BUILT_IN_ILOGB):
5900 if (! flag_unsafe_math_optimizations)
5901 break;
5902 CASE_FLT_FN (BUILT_IN_ISINF):
5903 CASE_FLT_FN (BUILT_IN_FINITE):
5904 case BUILT_IN_ISFINITE:
5905 case BUILT_IN_ISNORMAL:
5906 target = expand_builtin_interclass_mathfn (exp, target);
5907 if (target)
5908 return target;
5909 break;
5910
5911 CASE_FLT_FN (BUILT_IN_ICEIL):
5912 CASE_FLT_FN (BUILT_IN_LCEIL):
5913 CASE_FLT_FN (BUILT_IN_LLCEIL):
5914 CASE_FLT_FN (BUILT_IN_LFLOOR):
5915 CASE_FLT_FN (BUILT_IN_IFLOOR):
5916 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5917 target = expand_builtin_int_roundingfn (exp, target);
5918 if (target)
5919 return target;
5920 break;
5921
5922 CASE_FLT_FN (BUILT_IN_IRINT):
5923 CASE_FLT_FN (BUILT_IN_LRINT):
5924 CASE_FLT_FN (BUILT_IN_LLRINT):
5925 CASE_FLT_FN (BUILT_IN_IROUND):
5926 CASE_FLT_FN (BUILT_IN_LROUND):
5927 CASE_FLT_FN (BUILT_IN_LLROUND):
5928 target = expand_builtin_int_roundingfn_2 (exp, target);
5929 if (target)
5930 return target;
5931 break;
5932
5933 CASE_FLT_FN (BUILT_IN_POWI):
5934 target = expand_builtin_powi (exp, target);
5935 if (target)
5936 return target;
5937 break;
5938
5939 CASE_FLT_FN (BUILT_IN_ATAN2):
5940 CASE_FLT_FN (BUILT_IN_LDEXP):
5941 CASE_FLT_FN (BUILT_IN_SCALB):
5942 CASE_FLT_FN (BUILT_IN_SCALBN):
5943 CASE_FLT_FN (BUILT_IN_SCALBLN):
5944 if (! flag_unsafe_math_optimizations)
5945 break;
5946
5947 CASE_FLT_FN (BUILT_IN_FMOD):
5948 CASE_FLT_FN (BUILT_IN_REMAINDER):
5949 CASE_FLT_FN (BUILT_IN_DREM):
5950 CASE_FLT_FN (BUILT_IN_POW):
5951 target = expand_builtin_mathfn_2 (exp, target, subtarget);
5952 if (target)
5953 return target;
5954 break;
5955
5956 CASE_FLT_FN (BUILT_IN_CEXPI):
5957 target = expand_builtin_cexpi (exp, target);
5958 gcc_assert (target);
5959 return target;
5960
5961 CASE_FLT_FN (BUILT_IN_SIN):
5962 CASE_FLT_FN (BUILT_IN_COS):
5963 if (! flag_unsafe_math_optimizations)
5964 break;
5965 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5966 if (target)
5967 return target;
5968 break;
5969
5970 CASE_FLT_FN (BUILT_IN_SINCOS):
5971 if (! flag_unsafe_math_optimizations)
5972 break;
5973 target = expand_builtin_sincos (exp);
5974 if (target)
5975 return target;
5976 break;
5977
5978 case BUILT_IN_APPLY_ARGS:
5979 return expand_builtin_apply_args ();
5980
5981 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5982 FUNCTION with a copy of the parameters described by
5983 ARGUMENTS, and ARGSIZE. It returns a block of memory
5984 allocated on the stack into which is stored all the registers
5985 that might possibly be used for returning the result of a
5986 function. ARGUMENTS is the value returned by
5987 __builtin_apply_args. ARGSIZE is the number of bytes of
5988 arguments that must be copied. ??? How should this value be
5989 computed? We'll also need a safe worst case value for varargs
5990 functions. */
5991 case BUILT_IN_APPLY:
5992 if (!validate_arglist (exp, POINTER_TYPE,
5993 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5994 && !validate_arglist (exp, REFERENCE_TYPE,
5995 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5996 return const0_rtx;
5997 else
5998 {
5999 rtx ops[3];
6000
6001 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6002 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6003 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6004
6005 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6006 }
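
      /* Usage sketch (caller-side C): a forwarding wrapper looks roughly
         like

           void *args = __builtin_apply_args ();
           void *res = __builtin_apply ((void (*) ()) target_fn, args, 64);
           __builtin_return (res);

         where target_fn stands for the wrapped callee and 64 is a
         caller-chosen upper bound on the argument bytes, per the ???
         note above.  */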
6007
6008 /* __builtin_return (RESULT) causes the function to return the
6009 value described by RESULT. RESULT is address of the block of
6010 memory returned by __builtin_apply. */
6011 case BUILT_IN_RETURN:
6012 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6013 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6014 return const0_rtx;
6015
6016 case BUILT_IN_SAVEREGS:
6017 return expand_builtin_saveregs ();
6018
6019 case BUILT_IN_VA_ARG_PACK:
6020 /* All valid uses of __builtin_va_arg_pack () are removed during
6021 inlining. */
6022 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6023 return const0_rtx;
6024
6025 case BUILT_IN_VA_ARG_PACK_LEN:
6026 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6027 inlining. */
6028 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6029 return const0_rtx;
6030
6031 /* Return the address of the first anonymous stack arg. */
6032 case BUILT_IN_NEXT_ARG:
6033 if (fold_builtin_next_arg (exp, false))
6034 return const0_rtx;
6035 return expand_builtin_next_arg ();
6036
6037 case BUILT_IN_CLEAR_CACHE:
6038 target = expand_builtin___clear_cache (exp);
6039 if (target)
6040 return target;
6041 break;
6042
6043 case BUILT_IN_CLASSIFY_TYPE:
6044 return expand_builtin_classify_type (exp);
6045
6046 case BUILT_IN_CONSTANT_P:
6047 return const0_rtx;
6048
6049 case BUILT_IN_FRAME_ADDRESS:
6050 case BUILT_IN_RETURN_ADDRESS:
6051 return expand_builtin_frame_address (fndecl, exp);
6052
6053 /* Returns the address of the area where the structure is returned.
6054 0 otherwise. */
6055 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6056 if (call_expr_nargs (exp) != 0
6057 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6058 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6059 return const0_rtx;
6060 else
6061 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6062
6063 case BUILT_IN_ALLOCA:
6064 case BUILT_IN_ALLOCA_WITH_ALIGN:
6065 /* If the allocation stems from the declaration of a variable-sized
6066 object, it cannot accumulate. */
6067 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6068 if (target)
6069 return target;
6070 break;
6071
6072 case BUILT_IN_STACK_SAVE:
6073 return expand_stack_save ();
6074
6075 case BUILT_IN_STACK_RESTORE:
6076 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6077 return const0_rtx;
6078
6079 case BUILT_IN_BSWAP16:
6080 case BUILT_IN_BSWAP32:
6081 case BUILT_IN_BSWAP64:
6082 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6083 if (target)
6084 return target;
6085 break;
6086
6087 CASE_INT_FN (BUILT_IN_FFS):
6088 target = expand_builtin_unop (target_mode, exp, target,
6089 subtarget, ffs_optab);
6090 if (target)
6091 return target;
6092 break;
6093
6094 CASE_INT_FN (BUILT_IN_CLZ):
6095 target = expand_builtin_unop (target_mode, exp, target,
6096 subtarget, clz_optab);
6097 if (target)
6098 return target;
6099 break;
6100
6101 CASE_INT_FN (BUILT_IN_CTZ):
6102 target = expand_builtin_unop (target_mode, exp, target,
6103 subtarget, ctz_optab);
6104 if (target)
6105 return target;
6106 break;
6107
6108 CASE_INT_FN (BUILT_IN_CLRSB):
6109 target = expand_builtin_unop (target_mode, exp, target,
6110 subtarget, clrsb_optab);
6111 if (target)
6112 return target;
6113 break;
6114
6115 CASE_INT_FN (BUILT_IN_POPCOUNT):
6116 target = expand_builtin_unop (target_mode, exp, target,
6117 subtarget, popcount_optab);
6118 if (target)
6119 return target;
6120 break;
6121
6122 CASE_INT_FN (BUILT_IN_PARITY):
6123 target = expand_builtin_unop (target_mode, exp, target,
6124 subtarget, parity_optab);
6125 if (target)
6126 return target;
6127 break;
6128
6129 case BUILT_IN_STRLEN:
6130 target = expand_builtin_strlen (exp, target, target_mode);
6131 if (target)
6132 return target;
6133 break;
6134
6135 case BUILT_IN_STRCPY:
6136 target = expand_builtin_strcpy (exp, target);
6137 if (target)
6138 return target;
6139 break;
6140
6141 case BUILT_IN_STRNCPY:
6142 target = expand_builtin_strncpy (exp, target);
6143 if (target)
6144 return target;
6145 break;
6146
6147 case BUILT_IN_STPCPY:
6148 target = expand_builtin_stpcpy (exp, target, mode);
6149 if (target)
6150 return target;
6151 break;
6152
6153 case BUILT_IN_MEMCPY:
6154 target = expand_builtin_memcpy (exp, target);
6155 if (target)
6156 return target;
6157 break;
6158
6159 case BUILT_IN_MEMPCPY:
6160 target = expand_builtin_mempcpy (exp, target, mode);
6161 if (target)
6162 return target;
6163 break;
6164
6165 case BUILT_IN_MEMSET:
6166 target = expand_builtin_memset (exp, target, mode);
6167 if (target)
6168 return target;
6169 break;
6170
6171 case BUILT_IN_BZERO:
6172 target = expand_builtin_bzero (exp);
6173 if (target)
6174 return target;
6175 break;
6176
6177 case BUILT_IN_STRCMP:
6178 target = expand_builtin_strcmp (exp, target);
6179 if (target)
6180 return target;
6181 break;
6182
6183 case BUILT_IN_STRNCMP:
6184 target = expand_builtin_strncmp (exp, target, mode);
6185 if (target)
6186 return target;
6187 break;
6188
6189 case BUILT_IN_BCMP:
6190 case BUILT_IN_MEMCMP:
6191 target = expand_builtin_memcmp (exp, target, mode);
6192 if (target)
6193 return target;
6194 break;
6195
6196 case BUILT_IN_SETJMP:
6197 /* This should have been lowered to the builtins below. */
6198 gcc_unreachable ();
6199
6200 case BUILT_IN_SETJMP_SETUP:
6201 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6202 and the receiver label. */
6203 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6204 {
6205 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6206 VOIDmode, EXPAND_NORMAL);
6207 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6208 rtx label_r = label_rtx (label);
6209
6210 /* This is copied from the handling of non-local gotos. */
6211 expand_builtin_setjmp_setup (buf_addr, label_r);
6212 nonlocal_goto_handler_labels
6213 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6214 nonlocal_goto_handler_labels);
6215 /* ??? Do not let expand_label treat us as such since we would
6216 not want to be both on the list of non-local labels and on
6217 the list of forced labels. */
6218 FORCED_LABEL (label) = 0;
6219 return const0_rtx;
6220 }
6221 break;
6222
6223 case BUILT_IN_SETJMP_RECEIVER:
6224 /* __builtin_setjmp_receiver is passed the receiver label. */
6225 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6226 {
6227 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6228 rtx label_r = label_rtx (label);
6229
6230 expand_builtin_setjmp_receiver (label_r);
6231 return const0_rtx;
6232 }
6233 break;
6234
6235 /* __builtin_longjmp is passed a pointer to an array of five words.
6236 It's similar to the C library longjmp function but works with
6237 __builtin_setjmp above. */
6238 case BUILT_IN_LONGJMP:
6239 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6240 {
6241 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6242 VOIDmode, EXPAND_NORMAL);
6243 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6244
6245 if (value != const1_rtx)
6246 {
6247 error ("%<__builtin_longjmp%> second argument must be 1");
6248 return const0_rtx;
6249 }
6250
6251 expand_builtin_longjmp (buf_addr, value);
6252 return const0_rtx;
6253 }
6254 break;
6255
6256 case BUILT_IN_NONLOCAL_GOTO:
6257 target = expand_builtin_nonlocal_goto (exp);
6258 if (target)
6259 return target;
6260 break;
6261
6262 /* This updates the setjmp buffer that is its argument with the value
6263 of the current stack pointer. */
6264 case BUILT_IN_UPDATE_SETJMP_BUF:
6265 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6266 {
6267 rtx buf_addr
6268 = expand_normal (CALL_EXPR_ARG (exp, 0));
6269
6270 expand_builtin_update_setjmp_buf (buf_addr);
6271 return const0_rtx;
6272 }
6273 break;
6274
6275 case BUILT_IN_TRAP:
6276 expand_builtin_trap ();
6277 return const0_rtx;
6278
6279 case BUILT_IN_UNREACHABLE:
6280 expand_builtin_unreachable ();
6281 return const0_rtx;
6282
6283 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6284 case BUILT_IN_SIGNBITD32:
6285 case BUILT_IN_SIGNBITD64:
6286 case BUILT_IN_SIGNBITD128:
6287 target = expand_builtin_signbit (exp, target);
6288 if (target)
6289 return target;
6290 break;
6291
6292 /* Various hooks for the DWARF 2 __throw routine. */
6293 case BUILT_IN_UNWIND_INIT:
6294 expand_builtin_unwind_init ();
6295 return const0_rtx;
6296 case BUILT_IN_DWARF_CFA:
6297 return virtual_cfa_rtx;
6298 #ifdef DWARF2_UNWIND_INFO
6299 case BUILT_IN_DWARF_SP_COLUMN:
6300 return expand_builtin_dwarf_sp_column ();
6301 case BUILT_IN_INIT_DWARF_REG_SIZES:
6302 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6303 return const0_rtx;
6304 #endif
6305 case BUILT_IN_FROB_RETURN_ADDR:
6306 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6307 case BUILT_IN_EXTRACT_RETURN_ADDR:
6308 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6309 case BUILT_IN_EH_RETURN:
6310 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6311 CALL_EXPR_ARG (exp, 1));
6312 return const0_rtx;
6313 #ifdef EH_RETURN_DATA_REGNO
6314 case BUILT_IN_EH_RETURN_DATA_REGNO:
6315 return expand_builtin_eh_return_data_regno (exp);
6316 #endif
6317 case BUILT_IN_EXTEND_POINTER:
6318 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6319 case BUILT_IN_EH_POINTER:
6320 return expand_builtin_eh_pointer (exp);
6321 case BUILT_IN_EH_FILTER:
6322 return expand_builtin_eh_filter (exp);
6323 case BUILT_IN_EH_COPY_VALUES:
6324 return expand_builtin_eh_copy_values (exp);
6325
6326 case BUILT_IN_VA_START:
6327 return expand_builtin_va_start (exp);
6328 case BUILT_IN_VA_END:
6329 return expand_builtin_va_end (exp);
6330 case BUILT_IN_VA_COPY:
6331 return expand_builtin_va_copy (exp);
6332 case BUILT_IN_EXPECT:
6333 return expand_builtin_expect (exp, target);
6334 case BUILT_IN_ASSUME_ALIGNED:
6335 return expand_builtin_assume_aligned (exp, target);
6336 case BUILT_IN_PREFETCH:
6337 expand_builtin_prefetch (exp);
6338 return const0_rtx;
6339
6340 case BUILT_IN_INIT_TRAMPOLINE:
6341 return expand_builtin_init_trampoline (exp, true);
6342 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6343 return expand_builtin_init_trampoline (exp, false);
6344 case BUILT_IN_ADJUST_TRAMPOLINE:
6345 return expand_builtin_adjust_trampoline (exp);
6346
6347 case BUILT_IN_FORK:
6348 case BUILT_IN_EXECL:
6349 case BUILT_IN_EXECV:
6350 case BUILT_IN_EXECLP:
6351 case BUILT_IN_EXECLE:
6352 case BUILT_IN_EXECVP:
6353 case BUILT_IN_EXECVE:
6354 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6355 if (target)
6356 return target;
6357 break;
6358
6359 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6360 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6361 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6362 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6363 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6364 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6365 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6366 if (target)
6367 return target;
6368 break;
6369
6370 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6371 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6372 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6373 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6374 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6375 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6376 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6377 if (target)
6378 return target;
6379 break;
6380
6381 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6382 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6383 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6384 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6385 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6386 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6387 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6388 if (target)
6389 return target;
6390 break;
6391
6392 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6393 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6394 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6395 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6396 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6397 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6398 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6399 if (target)
6400 return target;
6401 break;
6402
6403 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6404 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6405 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6406 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6407 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6408 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6409 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6410 if (target)
6411 return target;
6412 break;
6413
6414 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6415 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6416 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6417 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6418 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6419 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6420 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6421 if (target)
6422 return target;
6423 break;
6424
6425 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6426 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6427 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6428 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6429 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6430 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6431 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6432 if (target)
6433 return target;
6434 break;
6435
6436 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6437 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6438 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6439 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6440 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6441 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6442 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6443 if (target)
6444 return target;
6445 break;
6446
6447 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6448 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6449 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6450 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6451 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6452 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6453 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6454 if (target)
6455 return target;
6456 break;
6457
6458 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6459 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6460 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6461 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6462 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6463 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6464 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6465 if (target)
6466 return target;
6467 break;
6468
6469 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6470 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6471 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6472 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6473 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6474 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6475 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6476 if (target)
6477 return target;
6478 break;
6479
6480 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6481 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6482 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6483 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6484 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6485 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6486 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6487 if (target)
6488 return target;
6489 break;
6490
6491 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6492 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6493 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6494 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6495 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6496 if (mode == VOIDmode)
6497 mode = TYPE_MODE (boolean_type_node);
6498 if (!target || !register_operand (target, mode))
6499 target = gen_reg_rtx (mode);
6500
6501 mode = get_builtin_sync_mode
6502 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6503 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6504 if (target)
6505 return target;
6506 break;
6507
6508 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6509 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6510 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6511 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6512 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6513 mode = get_builtin_sync_mode
6514 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6515 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6516 if (target)
6517 return target;
6518 break;
6519
6520 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6521 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6522 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6523 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6524 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6525 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6526 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6527 if (target)
6528 return target;
6529 break;
6530
6531 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6532 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6533 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6534 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6535 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6536 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6537 expand_builtin_sync_lock_release (mode, exp);
6538 return const0_rtx;
6539
6540 case BUILT_IN_SYNC_SYNCHRONIZE:
6541 expand_builtin_sync_synchronize ();
6542 return const0_rtx;
6543
6544 case BUILT_IN_ATOMIC_EXCHANGE_1:
6545 case BUILT_IN_ATOMIC_EXCHANGE_2:
6546 case BUILT_IN_ATOMIC_EXCHANGE_4:
6547 case BUILT_IN_ATOMIC_EXCHANGE_8:
6548 case BUILT_IN_ATOMIC_EXCHANGE_16:
6549 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6550 target = expand_builtin_atomic_exchange (mode, exp, target);
6551 if (target)
6552 return target;
6553 break;
6554
6555 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6556 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6557 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6558 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6559 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6560 {
6561 unsigned int nargs, z;
6562 vec<tree, va_gc> *vec;
6563
6564 mode =
6565 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6566 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6567 if (target)
6568 return target;
6569
6570 /* If this is turned into an external library call, the weak parameter
6571 must be dropped to match the expected parameter list. */
6572 nargs = call_expr_nargs (exp);
6573 vec_alloc (vec, nargs - 1);
6574 for (z = 0; z < 3; z++)
6575 vec->quick_push (CALL_EXPR_ARG (exp, z));
6576 /* Skip the boolean weak parameter. */
6577 for (z = 4; z < 6; z++)
6578 vec->quick_push (CALL_EXPR_ARG (exp, z));
6579 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6580 break;
6581 }
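
      /* E.g. a 4-byte __atomic_compare_exchange_n call that cannot be
         inlined becomes a call to the external library routine
         __atomic_compare_exchange_4 (ptr, expected, desired, success,
         failure): the same arguments minus the boolean WEAK, which the
         library entry point does not take.  */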
6582
6583 case BUILT_IN_ATOMIC_LOAD_1:
6584 case BUILT_IN_ATOMIC_LOAD_2:
6585 case BUILT_IN_ATOMIC_LOAD_4:
6586 case BUILT_IN_ATOMIC_LOAD_8:
6587 case BUILT_IN_ATOMIC_LOAD_16:
6588 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6589 target = expand_builtin_atomic_load (mode, exp, target);
6590 if (target)
6591 return target;
6592 break;
6593
6594 case BUILT_IN_ATOMIC_STORE_1:
6595 case BUILT_IN_ATOMIC_STORE_2:
6596 case BUILT_IN_ATOMIC_STORE_4:
6597 case BUILT_IN_ATOMIC_STORE_8:
6598 case BUILT_IN_ATOMIC_STORE_16:
6599 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6600 target = expand_builtin_atomic_store (mode, exp);
6601 if (target)
6602 return const0_rtx;
6603 break;
6604
6605 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6606 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6607 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6608 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6609 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6610 {
6611 enum built_in_function lib;
6612 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6613 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6614 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6615 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6616 ignore, lib);
6617 if (target)
6618 return target;
6619 break;
6620 }
6621 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6622 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6623 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6624 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6625 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6626 {
6627 enum built_in_function lib;
6628 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6629 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6630 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6631 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6632 ignore, lib);
6633 if (target)
6634 return target;
6635 break;
6636 }
6637 case BUILT_IN_ATOMIC_AND_FETCH_1:
6638 case BUILT_IN_ATOMIC_AND_FETCH_2:
6639 case BUILT_IN_ATOMIC_AND_FETCH_4:
6640 case BUILT_IN_ATOMIC_AND_FETCH_8:
6641 case BUILT_IN_ATOMIC_AND_FETCH_16:
6642 {
6643 enum built_in_function lib;
6644 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6645 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6646 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6647 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6648 ignore, lib);
6649 if (target)
6650 return target;
6651 break;
6652 }
6653 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6654 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6655 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6656 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6657 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6658 {
6659 enum built_in_function lib;
6660 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6661 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6662 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6663 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6664 ignore, lib);
6665 if (target)
6666 return target;
6667 break;
6668 }
6669 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6670 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6671 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6672 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6673 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6674 {
6675 enum built_in_function lib;
6676 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6677 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6678 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6679 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6680 ignore, lib);
6681 if (target)
6682 return target;
6683 break;
6684 }
6685 case BUILT_IN_ATOMIC_OR_FETCH_1:
6686 case BUILT_IN_ATOMIC_OR_FETCH_2:
6687 case BUILT_IN_ATOMIC_OR_FETCH_4:
6688 case BUILT_IN_ATOMIC_OR_FETCH_8:
6689 case BUILT_IN_ATOMIC_OR_FETCH_16:
6690 {
6691 enum built_in_function lib;
6692 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6693 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6694 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6695 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6696 ignore, lib);
6697 if (target)
6698 return target;
6699 break;
6700 }
6701 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6702 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6703 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6704 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6705 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6706 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6707 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6708 ignore, BUILT_IN_NONE);
6709 if (target)
6710 return target;
6711 break;
6712
6713 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6714 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6715 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6716 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6717 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6718 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6719 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6720 ignore, BUILT_IN_NONE);
6721 if (target)
6722 return target;
6723 break;
6724
6725 case BUILT_IN_ATOMIC_FETCH_AND_1:
6726 case BUILT_IN_ATOMIC_FETCH_AND_2:
6727 case BUILT_IN_ATOMIC_FETCH_AND_4:
6728 case BUILT_IN_ATOMIC_FETCH_AND_8:
6729 case BUILT_IN_ATOMIC_FETCH_AND_16:
6730 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6731 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6732 ignore, BUILT_IN_NONE);
6733 if (target)
6734 return target;
6735 break;
6736
6737 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6738 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6739 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6740 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6741 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6742 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6743 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6744 ignore, BUILT_IN_NONE);
6745 if (target)
6746 return target;
6747 break;
6748
6749 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6750 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6751 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6752 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6753 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6754 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6755 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6756 ignore, BUILT_IN_NONE);
6757 if (target)
6758 return target;
6759 break;
6760
6761 case BUILT_IN_ATOMIC_FETCH_OR_1:
6762 case BUILT_IN_ATOMIC_FETCH_OR_2:
6763 case BUILT_IN_ATOMIC_FETCH_OR_4:
6764 case BUILT_IN_ATOMIC_FETCH_OR_8:
6765 case BUILT_IN_ATOMIC_FETCH_OR_16:
6766 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6767 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6768 ignore, BUILT_IN_NONE);
6769 if (target)
6770 return target;
6771 break;
6772
6773 case BUILT_IN_ATOMIC_TEST_AND_SET:
6774 return expand_builtin_atomic_test_and_set (exp, target);
6775
6776 case BUILT_IN_ATOMIC_CLEAR:
6777 return expand_builtin_atomic_clear (exp);
6778
6779 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6780 return expand_builtin_atomic_always_lock_free (exp);
6781
6782 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6783 target = expand_builtin_atomic_is_lock_free (exp);
6784 if (target)
6785 return target;
6786 break;
6787
6788 case BUILT_IN_ATOMIC_THREAD_FENCE:
6789 expand_builtin_atomic_thread_fence (exp);
6790 return const0_rtx;
6791
6792 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6793 expand_builtin_atomic_signal_fence (exp);
6794 return const0_rtx;
6795
6796 case BUILT_IN_OBJECT_SIZE:
6797 return expand_builtin_object_size (exp);
6798
6799 case BUILT_IN_MEMCPY_CHK:
6800 case BUILT_IN_MEMPCPY_CHK:
6801 case BUILT_IN_MEMMOVE_CHK:
6802 case BUILT_IN_MEMSET_CHK:
6803 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6804 if (target)
6805 return target;
6806 break;
6807
6808 case BUILT_IN_STRCPY_CHK:
6809 case BUILT_IN_STPCPY_CHK:
6810 case BUILT_IN_STRNCPY_CHK:
6811 case BUILT_IN_STPNCPY_CHK:
6812 case BUILT_IN_STRCAT_CHK:
6813 case BUILT_IN_STRNCAT_CHK:
6814 case BUILT_IN_SNPRINTF_CHK:
6815 case BUILT_IN_VSNPRINTF_CHK:
6816 maybe_emit_chk_warning (exp, fcode);
6817 break;
6818
6819 case BUILT_IN_SPRINTF_CHK:
6820 case BUILT_IN_VSPRINTF_CHK:
6821 maybe_emit_sprintf_chk_warning (exp, fcode);
6822 break;
6823
6824 case BUILT_IN_FREE:
6825 if (warn_free_nonheap_object)
6826 maybe_emit_free_warning (exp);
6827 break;
6828
6829 case BUILT_IN_THREAD_POINTER:
6830 return expand_builtin_thread_pointer (exp, target);
6831
6832 case BUILT_IN_SET_THREAD_POINTER:
6833 expand_builtin_set_thread_pointer (exp);
6834 return const0_rtx;
6835
6836 case BUILT_IN_CILK_DETACH:
6837 expand_builtin_cilk_detach (exp);
6838 return const0_rtx;
6839
6840 case BUILT_IN_CILK_POP_FRAME:
6841 expand_builtin_cilk_pop_frame (exp);
6842 return const0_rtx;
6843
6844 default: /* Just do a library call if this is an unknown builtin. */
6845 break;
6846 }
6847
6848 /* The switch statement above can drop through to cause the function
6849 to be called normally. */
6850 return expand_call (exp, target, ignore);
6851 }
6852
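/* Illustrative example (an annotation, not part of the original
   source): a call such as

     __atomic_fetch_add (&x, 1, __ATOMIC_SEQ_CST)

   is routed through the BUILT_IN_ATOMIC_FETCH_ADD_<n> cases above;
   if the target has no suitable atomic pattern, expansion fails and
   control reaches the expand_call fallback at the end of the
   function.  */
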
6853 /* Determine whether a tree node represents a call to a built-in
6854 function. If the tree T is a call to a built-in function with
6855 the right number of arguments of the appropriate types, return
6856 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6857 Otherwise the return value is END_BUILTINS. */
6858
6859 enum built_in_function
6860 builtin_mathfn_code (const_tree t)
6861 {
6862 const_tree fndecl, arg, parmlist;
6863 const_tree argtype, parmtype;
6864 const_call_expr_arg_iterator iter;
6865
6866 if (TREE_CODE (t) != CALL_EXPR
6867 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6868 return END_BUILTINS;
6869
6870 fndecl = get_callee_fndecl (t);
6871 if (fndecl == NULL_TREE
6872 || TREE_CODE (fndecl) != FUNCTION_DECL
6873 || ! DECL_BUILT_IN (fndecl)
6874 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6875 return END_BUILTINS;
6876
6877 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6878 init_const_call_expr_arg_iterator (t, &iter);
6879 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6880 {
6881 /* If a function doesn't take a variable number of arguments,
6882 the last element in the list will have type `void'. */
6883 parmtype = TREE_VALUE (parmlist);
6884 if (VOID_TYPE_P (parmtype))
6885 {
6886 if (more_const_call_expr_args_p (&iter))
6887 return END_BUILTINS;
6888 return DECL_FUNCTION_CODE (fndecl);
6889 }
6890
6891 if (! more_const_call_expr_args_p (&iter))
6892 return END_BUILTINS;
6893
6894 arg = next_const_call_expr_arg (&iter);
6895 argtype = TREE_TYPE (arg);
6896
6897 if (SCALAR_FLOAT_TYPE_P (parmtype))
6898 {
6899 if (! SCALAR_FLOAT_TYPE_P (argtype))
6900 return END_BUILTINS;
6901 }
6902 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6903 {
6904 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6905 return END_BUILTINS;
6906 }
6907 else if (POINTER_TYPE_P (parmtype))
6908 {
6909 if (! POINTER_TYPE_P (argtype))
6910 return END_BUILTINS;
6911 }
6912 else if (INTEGRAL_TYPE_P (parmtype))
6913 {
6914 if (! INTEGRAL_TYPE_P (argtype))
6915 return END_BUILTINS;
6916 }
6917 else
6918 return END_BUILTINS;
6919 }
6920
6921 /* Variable-length argument list. */
6922 return DECL_FUNCTION_CODE (fndecl);
6923 }
6924
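/* Illustrative example (an annotation, not part of the original
   source): for a tree T representing the C call sqrtf (x) with x of
   type float, builtin_mathfn_code (T) returns BUILT_IN_SQRTF once the
   argument classes match the parameter list; for a call through a
   function pointer, or for sqrtf applied to a pointer argument, it
   returns END_BUILTINS.  */
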
6925 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6926 evaluate to a constant. */
6927
6928 static tree
6929 fold_builtin_constant_p (tree arg)
6930 {
6931 /* We return 1 for a numeric type that's known to be a constant
6932 value at compile-time or for an aggregate type that's a
6933 literal constant. */
6934 STRIP_NOPS (arg);
6935
6936 /* If we know this is a constant, return the constant one. */
6937 if (CONSTANT_CLASS_P (arg)
6938 || (TREE_CODE (arg) == CONSTRUCTOR
6939 && TREE_CONSTANT (arg)))
6940 return integer_one_node;
6941 if (TREE_CODE (arg) == ADDR_EXPR)
6942 {
6943 tree op = TREE_OPERAND (arg, 0);
6944 if (TREE_CODE (op) == STRING_CST
6945 || (TREE_CODE (op) == ARRAY_REF
6946 && integer_zerop (TREE_OPERAND (op, 1))
6947 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6948 return integer_one_node;
6949 }
6950
6951 /* If this expression has side effects, show we don't know it to be a
6952 constant. Likewise if it's a pointer or aggregate type since in
6953 those cases we only want literals, since those are only optimized
6954 when generating RTL, not later.
6955 And finally, if we are compiling an initializer, not code, we
6956 need to return a definite result now; there's not going to be any
6957 more optimization done. */
6958 if (TREE_SIDE_EFFECTS (arg)
6959 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
6960 || POINTER_TYPE_P (TREE_TYPE (arg))
6961 || cfun == 0
6962 || folding_initializer
6963 || force_folding_builtin_constant_p)
6964 return integer_zero_node;
6965
6966 return NULL_TREE;
6967 }
6968
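/* Illustrative examples (annotations, not part of the original
   source) of the folding above, at the C source level:

     __builtin_constant_p (3)      -> 1   (CONSTANT_CLASS_P)
     __builtin_constant_p ("abc")  -> 1   (ADDR_EXPR of a STRING_CST)
     __builtin_constant_p (x++)    -> 0   (side effects)
     __builtin_constant_p (x)      -> NULL_TREE for an int X, i.e. the
       decision is deferred to later passes, unless we are folding an
       initializer, in which case the answer must be given now and
       is 0.  */
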
6969 /* Create builtin_expect with PRED and EXPECTED as its arguments and
6970 return it as a truthvalue. */
6971
6972 static tree
6973 build_builtin_expect_predicate (location_t loc, tree pred, tree expected)
6974 {
6975 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
6976
6977 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
6978 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
6979 ret_type = TREE_TYPE (TREE_TYPE (fn));
6980 pred_type = TREE_VALUE (arg_types);
6981 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
6982
6983 pred = fold_convert_loc (loc, pred_type, pred);
6984 expected = fold_convert_loc (loc, expected_type, expected);
6985 call_expr = build_call_expr_loc (loc, fn, 2, pred, expected);
6986
6987 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
6988 build_int_cst (ret_type, 0));
6989 }
6990
6991 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
6992 NULL_TREE if no simplification is possible. */
6993
6994 static tree
6995 fold_builtin_expect (location_t loc, tree arg0, tree arg1)
6996 {
6997 tree inner, fndecl, inner_arg0;
6998 enum tree_code code;
6999
7000 /* Distribute the expected value over short-circuiting operators.
7001 See through the cast from truthvalue_type_node to long. */
7002 inner_arg0 = arg0;
7003 while (TREE_CODE (inner_arg0) == NOP_EXPR
7004 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7005 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7006 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7007
7008 /* If this is a builtin_expect within a builtin_expect, keep the
7009 inner one. See through a comparison against a constant. It
7010 might have been added to create a truthvalue. */
7011 inner = inner_arg0;
7012
7013 if (COMPARISON_CLASS_P (inner)
7014 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7015 inner = TREE_OPERAND (inner, 0);
7016
7017 if (TREE_CODE (inner) == CALL_EXPR
7018 && (fndecl = get_callee_fndecl (inner))
7019 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7020 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7021 return arg0;
7022
7023 inner = inner_arg0;
7024 code = TREE_CODE (inner);
7025 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7026 {
7027 tree op0 = TREE_OPERAND (inner, 0);
7028 tree op1 = TREE_OPERAND (inner, 1);
7029
7030 op0 = build_builtin_expect_predicate (loc, op0, arg1);
7031 op1 = build_builtin_expect_predicate (loc, op1, arg1);
7032 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7033
7034 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7035 }
7036
7037 /* If the argument isn't invariant then there's nothing else we can do. */
7038 if (!TREE_CONSTANT (inner_arg0))
7039 return NULL_TREE;
7040
7041 /* If we expect that a comparison against the argument will fold to
7042 a constant, return the constant. In practice, this means a true
7043 constant or the address of a non-weak symbol. */
7044 inner = inner_arg0;
7045 STRIP_NOPS (inner);
7046 if (TREE_CODE (inner) == ADDR_EXPR)
7047 {
7048 do
7049 {
7050 inner = TREE_OPERAND (inner, 0);
7051 }
7052 while (TREE_CODE (inner) == COMPONENT_REF
7053 || TREE_CODE (inner) == ARRAY_REF);
7054 if ((TREE_CODE (inner) == VAR_DECL
7055 || TREE_CODE (inner) == FUNCTION_DECL)
7056 && DECL_WEAK (inner))
7057 return NULL_TREE;
7058 }
7059
7060 /* Otherwise, ARG0 already has the proper type for the return value. */
7061 return arg0;
7062 }
7063
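/* Illustrative example (an annotation, not part of the original
   source): the distribution step above rewrites

     __builtin_expect (a && b, 1)

   into the equivalent of

     (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)

   so that each arm of the short-circuit carries its own prediction.  */
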
7064 /* Fold a call to __builtin_classify_type with argument ARG. */
7065
7066 static tree
7067 fold_builtin_classify_type (tree arg)
7068 {
7069 if (arg == 0)
7070 return build_int_cst (integer_type_node, no_type_class);
7071
7072 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7073 }
7074
7075 /* Fold a call to __builtin_strlen with argument ARG. */
7076
7077 static tree
7078 fold_builtin_strlen (location_t loc, tree type, tree arg)
7079 {
7080 if (!validate_arg (arg, POINTER_TYPE))
7081 return NULL_TREE;
7082 else
7083 {
7084 tree len = c_strlen (arg, 0);
7085
7086 if (len)
7087 return fold_convert_loc (loc, type, len);
7088
7089 return NULL_TREE;
7090 }
7091 }
7092
7093 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7094
7095 static tree
7096 fold_builtin_inf (location_t loc, tree type, int warn)
7097 {
7098 REAL_VALUE_TYPE real;
7099
7100 /* __builtin_inff is intended to be usable to define INFINITY on all
7101 targets. If an infinity is not available, INFINITY expands "to a
7102 positive constant of type float that overflows at translation
7103 time", footnote "In this case, using INFINITY will violate the
7104 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7105 Thus we pedwarn to ensure this constraint violation is
7106 diagnosed. */
7107 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7108 pedwarn (loc, 0, "target format does not support infinity");
7109
7110 real_inf (&real);
7111 return build_real (type, real);
7112 }
7113
7114 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7115
7116 static tree
7117 fold_builtin_nan (tree arg, tree type, int quiet)
7118 {
7119 REAL_VALUE_TYPE real;
7120 const char *str;
7121
7122 if (!validate_arg (arg, POINTER_TYPE))
7123 return NULL_TREE;
7124 str = c_getstr (arg);
7125 if (!str)
7126 return NULL_TREE;
7127
7128 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7129 return NULL_TREE;
7130
7131 return build_real (type, real);
7132 }
7133
7134 /* Return true if the floating point expression T has an integer value.
7135 We also allow +Inf, -Inf and NaN to be considered integer values. */
7136
7137 static bool
7138 integer_valued_real_p (tree t)
7139 {
7140 switch (TREE_CODE (t))
7141 {
7142 case FLOAT_EXPR:
7143 return true;
7144
7145 case ABS_EXPR:
7146 case SAVE_EXPR:
7147 return integer_valued_real_p (TREE_OPERAND (t, 0));
7148
7149 case COMPOUND_EXPR:
7150 case MODIFY_EXPR:
7151 case BIND_EXPR:
7152 return integer_valued_real_p (TREE_OPERAND (t, 1));
7153
7154 case PLUS_EXPR:
7155 case MINUS_EXPR:
7156 case MULT_EXPR:
7157 case MIN_EXPR:
7158 case MAX_EXPR:
7159 return integer_valued_real_p (TREE_OPERAND (t, 0))
7160 && integer_valued_real_p (TREE_OPERAND (t, 1));
7161
7162 case COND_EXPR:
7163 return integer_valued_real_p (TREE_OPERAND (t, 1))
7164 && integer_valued_real_p (TREE_OPERAND (t, 2));
7165
7166 case REAL_CST:
7167 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7168
7169 case NOP_EXPR:
7170 {
7171 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7172 if (TREE_CODE (type) == INTEGER_TYPE)
7173 return true;
7174 if (TREE_CODE (type) == REAL_TYPE)
7175 return integer_valued_real_p (TREE_OPERAND (t, 0));
7176 break;
7177 }
7178
7179 case CALL_EXPR:
7180 switch (builtin_mathfn_code (t))
7181 {
7182 CASE_FLT_FN (BUILT_IN_CEIL):
7183 CASE_FLT_FN (BUILT_IN_FLOOR):
7184 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7185 CASE_FLT_FN (BUILT_IN_RINT):
7186 CASE_FLT_FN (BUILT_IN_ROUND):
7187 CASE_FLT_FN (BUILT_IN_TRUNC):
7188 return true;
7189
7190 CASE_FLT_FN (BUILT_IN_FMIN):
7191 CASE_FLT_FN (BUILT_IN_FMAX):
7192 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7193 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7194
7195 default:
7196 break;
7197 }
7198 break;
7199
7200 default:
7201 break;
7202 }
7203 return false;
7204 }
7205
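/* Illustrative examples (annotations, not part of the original
   source):

     integer_valued_real_p ((double) i)   -> true, for an int I
     integer_valued_real_p (floor (x))    -> true
     integer_valued_real_p (x + 0.5)      -> false in general
     integer_valued_real_p (fmin (floor (x), 3.0)) -> true  */
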
7206 /* FNDECL is assumed to be a builtin where truncation can be propagated
7207 across (for instance floor((double)f) == (double)floorf (f)).
7208 Do the transformation for a call with argument ARG. */
7209
7210 static tree
7211 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
7212 {
7213 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7214
7215 if (!validate_arg (arg, REAL_TYPE))
7216 return NULL_TREE;
7217
7218 /* Integer rounding functions are idempotent. */
7219 if (fcode == builtin_mathfn_code (arg))
7220 return arg;
7221
7222 /* If argument is already integer valued, and we don't need to worry
7223 about setting errno, there's no need to perform rounding. */
7224 if (! flag_errno_math && integer_valued_real_p (arg))
7225 return arg;
7226
7227 if (optimize)
7228 {
7229 tree arg0 = strip_float_extensions (arg);
7230 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7231 tree newtype = TREE_TYPE (arg0);
7232 tree decl;
7233
7234 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7235 && (decl = mathfn_built_in (newtype, fcode)))
7236 return fold_convert_loc (loc, ftype,
7237 build_call_expr_loc (loc, decl, 1,
7238 fold_convert_loc (loc,
7239 newtype,
7240 arg0)));
7241 }
7242 return NULL_TREE;
7243 }
7244
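/* Illustrative example (an annotation, not part of the original
   source): given a float F, the narrowing step above turns

     floor ((double) f)

   into

     (double) floorf (f)

   since the result of the rounding cannot need more precision than
   the float operand provides.  */
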
7245 /* FNDECL is assumed to be a builtin which can narrow the FP type of
7246 the argument, for instance lround((double)f) -> lroundf (f).
7247 Do the transformation for a call with argument ARG. */
7248
7249 static tree
7250 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
7251 {
7252 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7253
7254 if (!validate_arg (arg, REAL_TYPE))
7255 return NULL_TREE;
7256
7257 /* If argument is already integer valued, and we don't need to worry
7258 about setting errno, there's no need to perform rounding. */
7259 if (! flag_errno_math && integer_valued_real_p (arg))
7260 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7261 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7262
7263 if (optimize)
7264 {
7265 tree ftype = TREE_TYPE (arg);
7266 tree arg0 = strip_float_extensions (arg);
7267 tree newtype = TREE_TYPE (arg0);
7268 tree decl;
7269
7270 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7271 && (decl = mathfn_built_in (newtype, fcode)))
7272 return build_call_expr_loc (loc, decl, 1,
7273 fold_convert_loc (loc, newtype, arg0));
7274 }
7275
7276 /* Canonicalize iround (x) to lround (x) on ILP32 targets where
7277 sizeof (int) == sizeof (long). */
7278 if (TYPE_PRECISION (integer_type_node)
7279 == TYPE_PRECISION (long_integer_type_node))
7280 {
7281 tree newfn = NULL_TREE;
7282 switch (fcode)
7283 {
7284 CASE_FLT_FN (BUILT_IN_ICEIL):
7285 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7286 break;
7287
7288 CASE_FLT_FN (BUILT_IN_IFLOOR):
7289 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7290 break;
7291
7292 CASE_FLT_FN (BUILT_IN_IROUND):
7293 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7294 break;
7295
7296 CASE_FLT_FN (BUILT_IN_IRINT):
7297 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7298 break;
7299
7300 default:
7301 break;
7302 }
7303
7304 if (newfn)
7305 {
7306 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7307 return fold_convert_loc (loc,
7308 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7309 }
7310 }
7311
7312 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7313 sizeof (long long) == sizeof (long). */
7314 if (TYPE_PRECISION (long_long_integer_type_node)
7315 == TYPE_PRECISION (long_integer_type_node))
7316 {
7317 tree newfn = NULL_TREE;
7318 switch (fcode)
7319 {
7320 CASE_FLT_FN (BUILT_IN_LLCEIL):
7321 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7322 break;
7323
7324 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7325 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7326 break;
7327
7328 CASE_FLT_FN (BUILT_IN_LLROUND):
7329 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7330 break;
7331
7332 CASE_FLT_FN (BUILT_IN_LLRINT):
7333 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7334 break;
7335
7336 default:
7337 break;
7338 }
7339
7340 if (newfn)
7341 {
7342 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7343 return fold_convert_loc (loc,
7344 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7345 }
7346 }
7347
7348 return NULL_TREE;
7349 }
7350
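/* Illustrative examples (annotations, not part of the original
   source), for a float F:

     lround ((double) f)        -> lroundf (f)           (narrowing)
     llround (x) on LP64        -> (long long) lround (x)
     __builtin_iceil (x), ILP32 -> (int) lceil (x)

   the last two relying on the precision equalities tested above.  */
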
7351 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7352 return type. Return NULL_TREE if no simplification can be made. */
7353
7354 static tree
7355 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
7356 {
7357 tree res;
7358
7359 if (!validate_arg (arg, COMPLEX_TYPE)
7360 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7361 return NULL_TREE;
7362
7363 /* Calculate the result when the argument is a constant. */
7364 if (TREE_CODE (arg) == COMPLEX_CST
7365 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7366 type, mpfr_hypot)))
7367 return res;
7368
7369 if (TREE_CODE (arg) == COMPLEX_EXPR)
7370 {
7371 tree real = TREE_OPERAND (arg, 0);
7372 tree imag = TREE_OPERAND (arg, 1);
7373
7374 /* If either part is zero, cabs is fabs of the other. */
7375 if (real_zerop (real))
7376 return fold_build1_loc (loc, ABS_EXPR, type, imag);
7377 if (real_zerop (imag))
7378 return fold_build1_loc (loc, ABS_EXPR, type, real);
7379
7380 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7381 if (flag_unsafe_math_optimizations
7382 && operand_equal_p (real, imag, OEP_PURE_SAME))
7383 {
7384 const REAL_VALUE_TYPE sqrt2_trunc
7385 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7386 STRIP_NOPS (real);
7387 return fold_build2_loc (loc, MULT_EXPR, type,
7388 fold_build1_loc (loc, ABS_EXPR, type, real),
7389 build_real (type, sqrt2_trunc));
7390 }
7391 }
7392
7393 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7394 if (TREE_CODE (arg) == NEGATE_EXPR
7395 || TREE_CODE (arg) == CONJ_EXPR)
7396 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
7397
7398 /* Don't do this when optimizing for size. */
7399 if (flag_unsafe_math_optimizations
7400 && optimize && optimize_function_for_speed_p (cfun))
7401 {
7402 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7403
7404 if (sqrtfn != NULL_TREE)
7405 {
7406 tree rpart, ipart, result;
7407
7408 arg = builtin_save_expr (arg);
7409
7410 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
7411 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
7412
7413 rpart = builtin_save_expr (rpart);
7414 ipart = builtin_save_expr (ipart);
7415
7416 result = fold_build2_loc (loc, PLUS_EXPR, type,
7417 fold_build2_loc (loc, MULT_EXPR, type,
7418 rpart, rpart),
7419 fold_build2_loc (loc, MULT_EXPR, type,
7420 ipart, ipart));
7421
7422 return build_call_expr_loc (loc, sqrtfn, 1, result);
7423 }
7424 }
7425
7426 return NULL_TREE;
7427 }
7428
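/* Illustrative examples (annotations, not part of the original
   source):

     cabs (x + 0.0i)             -> fabs (x)
     cabs (-z), cabs (conj (z))  -> cabs (z)

   and, under -funsafe-math-optimizations when optimizing for speed,

     cabs (z) -> sqrt (__real__ z * __real__ z
                       + __imag__ z * __imag__ z).  */
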
7429 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7430 complex tree type of the result. If NEG is true, the imaginary
7431 zero is negative. */
7432
7433 static tree
7434 build_complex_cproj (tree type, bool neg)
7435 {
7436 REAL_VALUE_TYPE rinf, rzero = dconst0;
7437
7438 real_inf (&rinf);
7439 rzero.sign = neg;
7440 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7441 build_real (TREE_TYPE (type), rzero));
7442 }
7443
7444 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7445 return type. Return NULL_TREE if no simplification can be made. */
7446
7447 static tree
7448 fold_builtin_cproj (location_t loc, tree arg, tree type)
7449 {
7450 if (!validate_arg (arg, COMPLEX_TYPE)
7451 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7452 return NULL_TREE;
7453
7454 /* If there are no infinities, return arg. */
7455 if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type))))
7456 return non_lvalue_loc (loc, arg);
7457
7458 /* Calculate the result when the argument is a constant. */
7459 if (TREE_CODE (arg) == COMPLEX_CST)
7460 {
7461 const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
7462 const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
7463
7464 if (real_isinf (real) || real_isinf (imag))
7465 return build_complex_cproj (type, imag->sign);
7466 else
7467 return arg;
7468 }
7469 else if (TREE_CODE (arg) == COMPLEX_EXPR)
7470 {
7471 tree real = TREE_OPERAND (arg, 0);
7472 tree imag = TREE_OPERAND (arg, 1);
7473
7474 STRIP_NOPS (real);
7475 STRIP_NOPS (imag);
7476
7477 /* If the real part is inf and the imag part is known to be
7478 nonnegative, return (inf + 0i). Remember side-effects are
7479 possible in the imag part. */
7480 if (TREE_CODE (real) == REAL_CST
7481 && real_isinf (TREE_REAL_CST_PTR (real))
7482 && tree_expr_nonnegative_p (imag))
7483 return omit_one_operand_loc (loc, type,
7484 build_complex_cproj (type, false),
7485 arg);
7486
7487 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
7488 Remember side-effects are possible in the real part. */
7489 if (TREE_CODE (imag) == REAL_CST
7490 && real_isinf (TREE_REAL_CST_PTR (imag)))
7491 return
7492 omit_one_operand_loc (loc, type,
7493 build_complex_cproj (type, TREE_REAL_CST_PTR
7494 (imag)->sign), arg);
7495 }
7496
7497 return NULL_TREE;
7498 }
7499
7500 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7501 Return NULL_TREE if no simplification can be made. */
7502
7503 static tree
7504 fold_builtin_sqrt (location_t loc, tree arg, tree type)
7505 {
7507 enum built_in_function fcode;
7508 tree res;
7509
7510 if (!validate_arg (arg, REAL_TYPE))
7511 return NULL_TREE;
7512
7513 /* Calculate the result when the argument is a constant. */
7514 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7515 return res;
7516
7517 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7518 fcode = builtin_mathfn_code (arg);
7519 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7520 {
7521 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7522 arg = fold_build2_loc (loc, MULT_EXPR, type,
7523 CALL_EXPR_ARG (arg, 0),
7524 build_real (type, dconsthalf));
7525 return build_call_expr_loc (loc, expfn, 1, arg);
7526 }
7527
7528 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7529 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7530 {
7531 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7532
7533 if (powfn)
7534 {
7535 tree arg0 = CALL_EXPR_ARG (arg, 0);
7536 tree tree_root;
7537 /* The inner root was either sqrt or cbrt. */
7538 /* This was a conditional expression but it triggered a bug
7539 in Sun C 5.5. */
7540 REAL_VALUE_TYPE dconstroot;
7541 if (BUILTIN_SQRT_P (fcode))
7542 dconstroot = dconsthalf;
7543 else
7544 dconstroot = dconst_third ();
7545
7546 /* Adjust for the outer root. */
7547 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7548 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7549 tree_root = build_real (type, dconstroot);
7550 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7551 }
7552 }
7553
7554 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7555 if (flag_unsafe_math_optimizations
7556 && (fcode == BUILT_IN_POW
7557 || fcode == BUILT_IN_POWF
7558 || fcode == BUILT_IN_POWL))
7559 {
7560 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7561 tree arg0 = CALL_EXPR_ARG (arg, 0);
7562 tree arg1 = CALL_EXPR_ARG (arg, 1);
7563 tree narg1;
7564 if (!tree_expr_nonnegative_p (arg0))
7565 arg0 = build1 (ABS_EXPR, type, arg0);
7566 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7567 build_real (type, dconsthalf));
7568 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7569 }
7570
7571 return NULL_TREE;
7572 }
7573
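/* Illustrative examples (annotations, not part of the original
   source) of the -funsafe-math-optimizations folds above:

     sqrt (exp (x))     -> exp (x * 0.5)
     sqrt (cbrt (x))    -> pow (x, 1.0/6.0)
     sqrt (pow (x, y))  -> pow (fabs (x), y * 0.5)  */
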
7574 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7575 Return NULL_TREE if no simplification can be made. */
7576
7577 static tree
7578 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7579 {
7580 const enum built_in_function fcode = builtin_mathfn_code (arg);
7581 tree res;
7582
7583 if (!validate_arg (arg, REAL_TYPE))
7584 return NULL_TREE;
7585
7586 /* Calculate the result when the argument is a constant. */
7587 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7588 return res;
7589
7590 if (flag_unsafe_math_optimizations)
7591 {
7592 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7593 if (BUILTIN_EXPONENT_P (fcode))
7594 {
7595 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7596 const REAL_VALUE_TYPE third_trunc =
7597 real_value_truncate (TYPE_MODE (type), dconst_third ());
7598 arg = fold_build2_loc (loc, MULT_EXPR, type,
7599 CALL_EXPR_ARG (arg, 0),
7600 build_real (type, third_trunc));
7601 return build_call_expr_loc (loc, expfn, 1, arg);
7602 }
7603
7604 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7605 if (BUILTIN_SQRT_P (fcode))
7606 {
7607 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7608
7609 if (powfn)
7610 {
7611 tree arg0 = CALL_EXPR_ARG (arg, 0);
7612 tree tree_root;
7613 REAL_VALUE_TYPE dconstroot = dconst_third ();
7614
7615 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7616 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7617 tree_root = build_real (type, dconstroot);
7618 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7619 }
7620 }
7621
7622 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7623 if (BUILTIN_CBRT_P (fcode))
7624 {
7625 tree arg0 = CALL_EXPR_ARG (arg, 0);
7626 if (tree_expr_nonnegative_p (arg0))
7627 {
7628 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7629
7630 if (powfn)
7631 {
7632 tree tree_root;
7633 REAL_VALUE_TYPE dconstroot;
7634
7635 real_arithmetic (&dconstroot, MULT_EXPR,
7636 dconst_third_ptr (), dconst_third_ptr ());
7637 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7638 tree_root = build_real (type, dconstroot);
7639 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7640 }
7641 }
7642 }
7643
7644 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7645 if (fcode == BUILT_IN_POW
7646 || fcode == BUILT_IN_POWF
7647 || fcode == BUILT_IN_POWL)
7648 {
7649 tree arg00 = CALL_EXPR_ARG (arg, 0);
7650 tree arg01 = CALL_EXPR_ARG (arg, 1);
7651 if (tree_expr_nonnegative_p (arg00))
7652 {
7653 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7654 const REAL_VALUE_TYPE dconstroot
7655 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7656 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7657 build_real (type, dconstroot));
7658 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7659 }
7660 }
7661 }
7662 return NULL_TREE;
7663 }
7664
7665 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7666 TYPE is the type of the return value. Return NULL_TREE if no
7667 simplification can be made. */
7668
7669 static tree
7670 fold_builtin_cos (location_t loc,
7671 tree arg, tree type, tree fndecl)
7672 {
7673 tree res, narg;
7674
7675 if (!validate_arg (arg, REAL_TYPE))
7676 return NULL_TREE;
7677
7678 /* Calculate the result when the argument is a constant. */
7679 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7680 return res;
7681
7682 /* Optimize cos(-x) into cos (x). */
7683 if ((narg = fold_strip_sign_ops (arg)))
7684 return build_call_expr_loc (loc, fndecl, 1, narg);
7685
7686 return NULL_TREE;
7687 }
7688
7689 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7690 Return NULL_TREE if no simplification can be made. */
7691
7692 static tree
7693 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7694 {
7695 if (validate_arg (arg, REAL_TYPE))
7696 {
7697 tree res, narg;
7698
7699 /* Calculate the result when the argument is a constant. */
7700 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7701 return res;
7702
7703 /* Optimize cosh(-x) into cosh (x). */
7704 if ((narg = fold_strip_sign_ops (arg)))
7705 return build_call_expr_loc (loc, fndecl, 1, narg);
7706 }
7707
7708 return NULL_TREE;
7709 }
7710
7711 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7712 argument ARG. TYPE is the type of the return value. Return
7713 NULL_TREE if no simplification can be made. */
7714
7715 static tree
7716 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7717 bool hyper)
7718 {
7719 if (validate_arg (arg, COMPLEX_TYPE)
7720 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7721 {
7722 tree tmp;
7723
7724 /* Calculate the result when the argument is a constant. */
7725 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7726 return tmp;
7727
7728 /* Optimize fn(-x) into fn(x). */
7729 if ((tmp = fold_strip_sign_ops (arg)))
7730 return build_call_expr_loc (loc, fndecl, 1, tmp);
7731 }
7732
7733 return NULL_TREE;
7734 }
7735
7736 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7737 Return NULL_TREE if no simplification can be made. */
7738
7739 static tree
7740 fold_builtin_tan (tree arg, tree type)
7741 {
7742 enum built_in_function fcode;
7743 tree res;
7744
7745 if (!validate_arg (arg, REAL_TYPE))
7746 return NULL_TREE;
7747
7748 /* Calculate the result when the argument is a constant. */
7749 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7750 return res;
7751
7752 /* Optimize tan(atan(x)) = x. */
7753 fcode = builtin_mathfn_code (arg);
7754 if (flag_unsafe_math_optimizations
7755 && (fcode == BUILT_IN_ATAN
7756 || fcode == BUILT_IN_ATANF
7757 || fcode == BUILT_IN_ATANL))
7758 return CALL_EXPR_ARG (arg, 0);
7759
7760 return NULL_TREE;
7761 }
7762
7763 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7764 NULL_TREE if no simplification can be made. */
7765
7766 static tree
7767 fold_builtin_sincos (location_t loc,
7768 tree arg0, tree arg1, tree arg2)
7769 {
7770 tree type;
7771 tree res, fn, call;
7772
7773 if (!validate_arg (arg0, REAL_TYPE)
7774 || !validate_arg (arg1, POINTER_TYPE)
7775 || !validate_arg (arg2, POINTER_TYPE))
7776 return NULL_TREE;
7777
7778 type = TREE_TYPE (arg0);
7779
7780 /* Calculate the result when the argument is a constant. */
7781 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7782 return res;
7783
7784 /* Canonicalize sincos to cexpi. */
7785 if (!targetm.libc_has_function (function_c99_math_complex))
7786 return NULL_TREE;
7787 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7788 if (!fn)
7789 return NULL_TREE;
7790
7791 call = build_call_expr_loc (loc, fn, 1, arg0);
7792 call = builtin_save_expr (call);
7793
7794 return build2 (COMPOUND_EXPR, void_type_node,
7795 build2 (MODIFY_EXPR, void_type_node,
7796 build_fold_indirect_ref_loc (loc, arg1),
7797 build1 (IMAGPART_EXPR, type, call)),
7798 build2 (MODIFY_EXPR, void_type_node,
7799 build_fold_indirect_ref_loc (loc, arg2),
7800 build1 (REALPART_EXPR, type, call)));
7801 }
7802
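/* Illustrative example (an annotation, not part of the original
   source): on a libc with C99 complex support, the canonicalization
   above turns sincos (x, &s, &c) into roughly

     _Complex double t = cexpi (x);  s = __imag__ t;  c = __real__ t;

   where cexpi (x) computes cos (x) + I * sin (x), so that several
   sin/cos/sincos uses of the same argument can later be commoned.  */
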
7803 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7804 NULL_TREE if no simplification can be made. */
7805
7806 static tree
7807 fold_builtin_cexp (location_t loc, tree arg0, tree type)
7808 {
7809 tree rtype;
7810 tree realp, imagp, ifn;
7811 tree res;
7812
7813 if (!validate_arg (arg0, COMPLEX_TYPE)
7814 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
7815 return NULL_TREE;
7816
7817 /* Calculate the result when the argument is a constant. */
7818 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
7819 return res;
7820
7821 rtype = TREE_TYPE (TREE_TYPE (arg0));
7822
7823 /* If we can figure out the real part of arg0 and it is constant zero,
7824 fold to cexpi. */
7825 if (!targetm.libc_has_function (function_c99_math_complex))
7826 return NULL_TREE;
7827 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7828 if (!ifn)
7829 return NULL_TREE;
7830
7831 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
7832 && real_zerop (realp))
7833 {
7834 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
7835 return build_call_expr_loc (loc, ifn, 1, narg);
7836 }
7837
7838 /* If we can easily decompose the real and imaginary parts, split cexp
7839 into exp (r) * cexpi (i). */
7840 if (flag_unsafe_math_optimizations
7841 && realp)
7842 {
7843 tree rfn, rcall, icall;
7844
7845 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7846 if (!rfn)
7847 return NULL_TREE;
7848
7849 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
7850 if (!imagp)
7851 return NULL_TREE;
7852
7853 icall = build_call_expr_loc (loc, ifn, 1, imagp);
7854 icall = builtin_save_expr (icall);
7855 rcall = build_call_expr_loc (loc, rfn, 1, realp);
7856 rcall = builtin_save_expr (rcall);
7857 return fold_build2_loc (loc, COMPLEX_EXPR, type,
7858 fold_build2_loc (loc, MULT_EXPR, rtype,
7859 rcall,
7860 fold_build1_loc (loc, REALPART_EXPR,
7861 rtype, icall)),
7862 fold_build2_loc (loc, MULT_EXPR, rtype,
7863 rcall,
7864 fold_build1_loc (loc, IMAGPART_EXPR,
7865 rtype, icall)));
7866 }
7867
7868 return NULL_TREE;
7869 }
7870
7871 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7872 Return NULL_TREE if no simplification can be made. */
7873
7874 static tree
7875 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7876 {
7877 if (!validate_arg (arg, REAL_TYPE))
7878 return NULL_TREE;
7879
7880 /* Optimize trunc of constant value. */
7881 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7882 {
7883 REAL_VALUE_TYPE r, x;
7884 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7885
7886 x = TREE_REAL_CST (arg);
7887 real_trunc (&r, TYPE_MODE (type), &x);
7888 return build_real (type, r);
7889 }
7890
7891 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7892 }
7893
7894 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7895 Return NULL_TREE if no simplification can be made. */
7896
7897 static tree
7898 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7899 {
7900 if (!validate_arg (arg, REAL_TYPE))
7901 return NULL_TREE;
7902
7903 /* Optimize floor of constant value. */
7904 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7905 {
7906 REAL_VALUE_TYPE x;
7907
7908 x = TREE_REAL_CST (arg);
7909 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7910 {
7911 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7912 REAL_VALUE_TYPE r;
7913
7914 real_floor (&r, TYPE_MODE (type), &x);
7915 return build_real (type, r);
7916 }
7917 }
7918
7919 /* Fold floor (x) where x is nonnegative to trunc (x). */
7920 if (tree_expr_nonnegative_p (arg))
7921 {
7922 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7923 if (truncfn)
7924 return build_call_expr_loc (loc, truncfn, 1, arg);
7925 }
7926
7927 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7928 }
7929
7930 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7931 Return NULL_TREE if no simplification can be made. */
7932
7933 static tree
7934 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7935 {
7936 if (!validate_arg (arg, REAL_TYPE))
7937 return NULL_TREE;
7938
7939 /* Optimize ceil of constant value. */
7940 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7941 {
7942 REAL_VALUE_TYPE x;
7943
7944 x = TREE_REAL_CST (arg);
7945 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7946 {
7947 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7948 REAL_VALUE_TYPE r;
7949
7950 real_ceil (&r, TYPE_MODE (type), &x);
7951 return build_real (type, r);
7952 }
7953 }
7954
7955 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7956 }
7957
7958 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7959 Return NULL_TREE if no simplification can be made. */
7960
7961 static tree
7962 fold_builtin_round (location_t loc, tree fndecl, tree arg)
7963 {
7964 if (!validate_arg (arg, REAL_TYPE))
7965 return NULL_TREE;
7966
7967 /* Optimize round of constant value. */
7968 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7969 {
7970 REAL_VALUE_TYPE x;
7971
7972 x = TREE_REAL_CST (arg);
7973 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7974 {
7975 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7976 REAL_VALUE_TYPE r;
7977
7978 real_round (&r, TYPE_MODE (type), &x);
7979 return build_real (type, r);
7980 }
7981 }
7982
7983 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7984 }
7985
7986 /* Fold function call to builtin lround, lroundf or lroundl (or the
7987 corresponding long long versions) and other rounding functions. ARG
7988 is the argument to the call. Return NULL_TREE if no simplification
7989 can be made. */
7990
7991 static tree
7992 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
7993 {
7994 if (!validate_arg (arg, REAL_TYPE))
7995 return NULL_TREE;
7996
7997 /* Optimize lround of constant value. */
7998 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7999 {
8000 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
8001
8002 if (real_isfinite (&x))
8003 {
8004 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
8005 tree ftype = TREE_TYPE (arg);
8006 double_int val;
8007 REAL_VALUE_TYPE r;
8008
8009 switch (DECL_FUNCTION_CODE (fndecl))
8010 {
8011 CASE_FLT_FN (BUILT_IN_IFLOOR):
8012 CASE_FLT_FN (BUILT_IN_LFLOOR):
8013 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8014 real_floor (&r, TYPE_MODE (ftype), &x);
8015 break;
8016
8017 CASE_FLT_FN (BUILT_IN_ICEIL):
8018 CASE_FLT_FN (BUILT_IN_LCEIL):
8019 CASE_FLT_FN (BUILT_IN_LLCEIL):
8020 real_ceil (&r, TYPE_MODE (ftype), &x);
8021 break;
8022
8023 CASE_FLT_FN (BUILT_IN_IROUND):
8024 CASE_FLT_FN (BUILT_IN_LROUND):
8025 CASE_FLT_FN (BUILT_IN_LLROUND):
8026 real_round (&r, TYPE_MODE (ftype), &x);
8027 break;
8028
8029 default:
8030 gcc_unreachable ();
8031 }
8032
8033 real_to_integer2 ((HOST_WIDE_INT *)&val.low, &val.high, &r);
8034 if (double_int_fits_to_tree_p (itype, val))
8035 return double_int_to_tree (itype, val);
8036 }
8037 }
8038
8039 switch (DECL_FUNCTION_CODE (fndecl))
8040 {
8041 CASE_FLT_FN (BUILT_IN_LFLOOR):
8042 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8043 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8044 if (tree_expr_nonnegative_p (arg))
8045 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
8046 TREE_TYPE (TREE_TYPE (fndecl)), arg);
8047 break;
8048 default:;
8049 }
8050
8051 return fold_fixed_mathfn (loc, fndecl, arg);
8052 }
8053
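/* Illustrative examples (annotations, not part of the original
   source):

     lround (2.5)         -> 3           (constant-folded, rounding
                                          half away from zero)
     lfloor (x), x >= 0   -> (long) x    (FIX_TRUNC_EXPR)

   Constants whose rounded value does not fit the integer return type
   are left unfolded.  */
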
8054 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8055 and their long and long long variants (e.g. ffsl and ffsll). ARG is
8056 the argument to the call. Return NULL_TREE if no simplification can
8057 be made. */
8058
8059 static tree
8060 fold_builtin_bitop (tree fndecl, tree arg)
8061 {
8062 if (!validate_arg (arg, INTEGER_TYPE))
8063 return NULL_TREE;
8064
8065 /* Optimize for constant argument. */
8066 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8067 {
8068 HOST_WIDE_INT hi, width, result;
8069 unsigned HOST_WIDE_INT lo;
8070 tree type;
8071
8072 type = TREE_TYPE (arg);
8073 width = TYPE_PRECISION (type);
8074 lo = TREE_INT_CST_LOW (arg);
8075
8076 /* Clear all the bits that are beyond the type's precision. */
8077 if (width > HOST_BITS_PER_WIDE_INT)
8078 {
8079 hi = TREE_INT_CST_HIGH (arg);
8080 if (width < HOST_BITS_PER_DOUBLE_INT)
8081 hi &= ~(HOST_WIDE_INT_M1U << (width - HOST_BITS_PER_WIDE_INT));
8082 }
8083 else
8084 {
8085 hi = 0;
8086 if (width < HOST_BITS_PER_WIDE_INT)
8087 lo &= ~(HOST_WIDE_INT_M1U << width);
8088 }
8089
8090 switch (DECL_FUNCTION_CODE (fndecl))
8091 {
8092 CASE_INT_FN (BUILT_IN_FFS):
8093 if (lo != 0)
8094 result = ffs_hwi (lo);
8095 else if (hi != 0)
8096 result = HOST_BITS_PER_WIDE_INT + ffs_hwi (hi);
8097 else
8098 result = 0;
8099 break;
8100
8101 CASE_INT_FN (BUILT_IN_CLZ):
8102 if (hi != 0)
8103 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
8104 else if (lo != 0)
8105 result = width - floor_log2 (lo) - 1;
8106 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8107 result = width;
8108 break;
8109
8110 CASE_INT_FN (BUILT_IN_CTZ):
8111 if (lo != 0)
8112 result = ctz_hwi (lo);
8113 else if (hi != 0)
8114 result = HOST_BITS_PER_WIDE_INT + ctz_hwi (hi);
8115 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8116 result = width;
8117 break;
8118
8119 CASE_INT_FN (BUILT_IN_CLRSB):
8120 if (width > 2 * HOST_BITS_PER_WIDE_INT)
8121 return NULL_TREE;
8122 if (width > HOST_BITS_PER_WIDE_INT
8123 && (hi & ((unsigned HOST_WIDE_INT) 1
8124 << (width - HOST_BITS_PER_WIDE_INT - 1))) != 0)
8125 {
8126 hi = ~hi & ~(HOST_WIDE_INT_M1U
8127 << (width - HOST_BITS_PER_WIDE_INT - 1));
8128 lo = ~lo;
8129 }
8130 else if (width <= HOST_BITS_PER_WIDE_INT
8131 && (lo & ((unsigned HOST_WIDE_INT) 1 << (width - 1))) != 0)
8132 lo = ~lo & ~(HOST_WIDE_INT_M1U << (width - 1));
8133 if (hi != 0)
8134 result = width - floor_log2 (hi) - 2 - HOST_BITS_PER_WIDE_INT;
8135 else if (lo != 0)
8136 result = width - floor_log2 (lo) - 2;
8137 else
8138 result = width - 1;
8139 break;
8140
8141 CASE_INT_FN (BUILT_IN_POPCOUNT):
8142 result = 0;
8143 while (lo)
8144 result++, lo &= lo - 1;
8145 while (hi)
8146 result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
8147 break;
8148
8149 CASE_INT_FN (BUILT_IN_PARITY):
8150 result = 0;
8151 while (lo)
8152 result++, lo &= lo - 1;
8153 while (hi)
8154 result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
8155 result &= 1;
8156 break;
8157
8158 default:
8159 gcc_unreachable ();
8160 }
8161
8162 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8163 }
8164
8165 return NULL_TREE;
8166 }
8167
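/* Illustrative examples (annotations, not part of the original
   source) of the constant folding above:

     __builtin_popcount (0xff)  -> 8
     __builtin_ffs (0x10)       -> 5
     __builtin_parity (7)       -> 1
     __builtin_clz (0)          -> the target's defined value at zero
                                   if there is one, else the precision
                                   of the argument type.  */
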
8168 /* Fold function call to builtin_bswap and the short, long and long long
8169 variants. Return NULL_TREE if no simplification can be made. */
8170 static tree
8171 fold_builtin_bswap (tree fndecl, tree arg)
8172 {
8173 if (! validate_arg (arg, INTEGER_TYPE))
8174 return NULL_TREE;
8175
8176 /* Optimize constant value. */
8177 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8178 {
8179 HOST_WIDE_INT hi, width, r_hi = 0;
8180 unsigned HOST_WIDE_INT lo, r_lo = 0;
8181 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8182
8183 width = TYPE_PRECISION (type);
8184 lo = TREE_INT_CST_LOW (arg);
8185 hi = TREE_INT_CST_HIGH (arg);
8186
8187 switch (DECL_FUNCTION_CODE (fndecl))
8188 {
8189 case BUILT_IN_BSWAP16:
8190 case BUILT_IN_BSWAP32:
8191 case BUILT_IN_BSWAP64:
8192 {
8193 int s;
8194
8195 for (s = 0; s < width; s += 8)
8196 {
8197 int d = width - s - 8;
8198 unsigned HOST_WIDE_INT byte;
8199
8200 if (s < HOST_BITS_PER_WIDE_INT)
8201 byte = (lo >> s) & 0xff;
8202 else
8203 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
8204
8205 if (d < HOST_BITS_PER_WIDE_INT)
8206 r_lo |= byte << d;
8207 else
8208 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
8209 }
8210 }
8211
8212 break;
8213
8214 default:
8215 gcc_unreachable ();
8216 }
8217
8218 if (width < HOST_BITS_PER_WIDE_INT)
8219 return build_int_cst (type, r_lo);
8220 else
8221 return build_int_cst_wide (type, r_lo, r_hi);
8222 }
8223
8224 return NULL_TREE;
8225 }
8226
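/* Illustrative examples (annotations, not part of the original
   source):

     __builtin_bswap16 (0x1234)      -> 0x3412
     __builtin_bswap32 (0x12345678)  -> 0x78563412

   Each loop iteration above moves the byte at bit offset S to the
   mirror-image offset WIDTH - S - 8.  */
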
8227 /* A subroutine of fold_builtin to fold the various logarithmic
8228 functions. Return NULL_TREE if no simplification can be made.
8229 FUNC is the corresponding MPFR logarithm function. */
8230
8231 static tree
8232 fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
8233 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8234 {
8235 if (validate_arg (arg, REAL_TYPE))
8236 {
8237 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8238 tree res;
8239 const enum built_in_function fcode = builtin_mathfn_code (arg);
8240
8241 /* Calculate the result when the argument is a constant. */
8242 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
8243 return res;
8244
8245 /* Special case, optimize logN(expN(x)) = x. */
8246 if (flag_unsafe_math_optimizations
8247 && ((func == mpfr_log
8248 && (fcode == BUILT_IN_EXP
8249 || fcode == BUILT_IN_EXPF
8250 || fcode == BUILT_IN_EXPL))
8251 || (func == mpfr_log2
8252 && (fcode == BUILT_IN_EXP2
8253 || fcode == BUILT_IN_EXP2F
8254 || fcode == BUILT_IN_EXP2L))
8255 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
8256 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8257
8258 /* Optimize logN(func()) for various exponential functions. We
8259 want to determine the value "x" and the power "exponent" in
8260 order to transform logN(x**exponent) into exponent*logN(x). */
8261 if (flag_unsafe_math_optimizations)
8262 {
8263 tree exponent = 0, x = 0;
8264
8265 switch (fcode)
8266 {
8267 CASE_FLT_FN (BUILT_IN_EXP):
8268 /* Prepare to do logN(exp(exponent)) -> exponent*logN(e). */
8269 x = build_real (type, real_value_truncate (TYPE_MODE (type),
8270 dconst_e ()));
8271 exponent = CALL_EXPR_ARG (arg, 0);
8272 break;
8273 CASE_FLT_FN (BUILT_IN_EXP2):
8274 /* Prepare to do logN(exp2(exponent)) -> exponent*logN(2). */
8275 x = build_real (type, dconst2);
8276 exponent = CALL_EXPR_ARG (arg, 0);
8277 break;
8278 CASE_FLT_FN (BUILT_IN_EXP10):
8279 CASE_FLT_FN (BUILT_IN_POW10):
8280 /* Prepare to do logN(exp10(exponent)) -> exponent*logN(10). */
8281 {
8282 REAL_VALUE_TYPE dconst10;
8283 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
8284 x = build_real (type, dconst10);
8285 }
8286 exponent = CALL_EXPR_ARG (arg, 0);
8287 break;
8288 CASE_FLT_FN (BUILT_IN_SQRT):
8289 /* Prepare to do logN(sqrt(x)) -> 0.5*logN(x). */
8290 x = CALL_EXPR_ARG (arg, 0);
8291 exponent = build_real (type, dconsthalf);
8292 break;
8293 CASE_FLT_FN (BUILT_IN_CBRT):
8294 /* Prepare to do logN(cbrt(x)) -> (1/3)*logN(x). */
8295 x = CALL_EXPR_ARG (arg, 0);
8296 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
8297 dconst_third ()));
8298 break;
8299 CASE_FLT_FN (BUILT_IN_POW):
8300 /* Prepare to do logN(pow(x,exponent)) -> exponent*logN(x). */
8301 x = CALL_EXPR_ARG (arg, 0);
8302 exponent = CALL_EXPR_ARG (arg, 1);
8303 break;
8304 default:
8305 break;
8306 }
8307
8308 /* Now perform the optimization. */
8309 if (x && exponent)
8310 {
8311 tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
8312 return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
8313 }
8314 }
8315 }
8316
8317 return NULL_TREE;
8318 }
8319
8320 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8321 NULL_TREE if no simplification can be made. */
8322
8323 static tree
8324 fold_builtin_hypot (location_t loc, tree fndecl,
8325 tree arg0, tree arg1, tree type)
8326 {
8327 tree res, narg0, narg1;
8328
8329 if (!validate_arg (arg0, REAL_TYPE)
8330 || !validate_arg (arg1, REAL_TYPE))
8331 return NULL_TREE;
8332
8333 /* Calculate the result when the argument is a constant. */
8334 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8335 return res;
8336
8337 /* If either argument to hypot has a negate or abs, strip that off.
8338 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8339 narg0 = fold_strip_sign_ops (arg0);
8340 narg1 = fold_strip_sign_ops (arg1);
8341 if (narg0 || narg1)
8342 {
8343 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
8344 narg1 ? narg1 : arg1);
8345 }
8346
8347 /* If either argument is zero, hypot is fabs of the other. */
8348 if (real_zerop (arg0))
8349 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
8350 else if (real_zerop (arg1))
8351 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
8352
8353 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8354 if (flag_unsafe_math_optimizations
8355 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8356 {
8357 const REAL_VALUE_TYPE sqrt2_trunc
8358 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8359 return fold_build2_loc (loc, MULT_EXPR, type,
8360 fold_build1_loc (loc, ABS_EXPR, type, arg0),
8361 build_real (type, sqrt2_trunc));
8362 }
8363
8364 return NULL_TREE;
8365 }
8366
8367
8368 /* Fold a builtin function call to pow, powf, or powl. Return
8369 NULL_TREE if no simplification can be made. */
8370 static tree
8371 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
8372 {
8373 tree res;
8374
8375 if (!validate_arg (arg0, REAL_TYPE)
8376 || !validate_arg (arg1, REAL_TYPE))
8377 return NULL_TREE;
8378
8379 /* Calculate the result when the argument is a constant. */
8380 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8381 return res;
8382
8383 /* Optimize pow(1.0,y) = 1.0. */
8384 if (real_onep (arg0))
8385 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8386
8387 if (TREE_CODE (arg1) == REAL_CST
8388 && !TREE_OVERFLOW (arg1))
8389 {
8390 REAL_VALUE_TYPE cint;
8391 REAL_VALUE_TYPE c;
8392 HOST_WIDE_INT n;
8393
8394 c = TREE_REAL_CST (arg1);
8395
8396 /* Optimize pow(x,0.0) = 1.0. */
8397 if (REAL_VALUES_EQUAL (c, dconst0))
8398 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8399 arg0);
8400
8401 /* Optimize pow(x,1.0) = x. */
8402 if (REAL_VALUES_EQUAL (c, dconst1))
8403 return arg0;
8404
8405 /* Optimize pow(x,-1.0) = 1.0/x. */
8406 if (REAL_VALUES_EQUAL (c, dconstm1))
8407 return fold_build2_loc (loc, RDIV_EXPR, type,
8408 build_real (type, dconst1), arg0);
8409
8410 /* Optimize pow(x,0.5) = sqrt(x). */
8411 if (flag_unsafe_math_optimizations
8412 && REAL_VALUES_EQUAL (c, dconsthalf))
8413 {
8414 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8415
8416 if (sqrtfn != NULL_TREE)
8417 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
8418 }
8419
8420 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8421 if (flag_unsafe_math_optimizations)
8422 {
8423 const REAL_VALUE_TYPE dconstroot
8424 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8425
8426 if (REAL_VALUES_EQUAL (c, dconstroot))
8427 {
8428 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8429 if (cbrtfn != NULL_TREE)
8430 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
8431 }
8432 }
8433
8434 /* Check for an integer exponent. */
8435 n = real_to_integer (&c);
8436 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
8437 if (real_identical (&c, &cint))
8438 {
8439 /* Attempt to evaluate pow at compile-time, unless this should
8440 raise an exception. */
8441 if (TREE_CODE (arg0) == REAL_CST
8442 && !TREE_OVERFLOW (arg0)
8443 && (n > 0
8444 || (!flag_trapping_math && !flag_errno_math)
8445 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
8446 {
8447 REAL_VALUE_TYPE x;
8448 bool inexact;
8449
8450 x = TREE_REAL_CST (arg0);
8451 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8452 if (flag_unsafe_math_optimizations || !inexact)
8453 return build_real (type, x);
8454 }
8455
8456 /* Strip sign ops from even integer powers. */
8457 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8458 {
8459 tree narg0 = fold_strip_sign_ops (arg0);
8460 if (narg0)
8461 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
8462 }
8463 }
8464 }
8465
8466 if (flag_unsafe_math_optimizations)
8467 {
8468 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8469
8470 /* Optimize pow(expN(x),y) = expN(x*y). */
8471 if (BUILTIN_EXPONENT_P (fcode))
8472 {
8473 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8474 tree arg = CALL_EXPR_ARG (arg0, 0);
8475 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
8476 return build_call_expr_loc (loc, expfn, 1, arg);
8477 }
8478
8479 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8480 if (BUILTIN_SQRT_P (fcode))
8481 {
8482 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8483 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8484 build_real (type, dconsthalf));
8485 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
8486 }
8487
8488 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8489 if (BUILTIN_CBRT_P (fcode))
8490 {
8491 tree arg = CALL_EXPR_ARG (arg0, 0);
8492 if (tree_expr_nonnegative_p (arg))
8493 {
8494 const REAL_VALUE_TYPE dconstroot
8495 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8496 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8497 build_real (type, dconstroot));
8498 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
8499 }
8500 }
8501
8502 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8503 if (fcode == BUILT_IN_POW
8504 || fcode == BUILT_IN_POWF
8505 || fcode == BUILT_IN_POWL)
8506 {
8507 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8508 if (tree_expr_nonnegative_p (arg00))
8509 {
8510 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8511 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
8512 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
8513 }
8514 }
8515 }
8516
8517 return NULL_TREE;
8518 }
8519
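/* Illustrative examples (annotations, not part of the original
   source):

     pow (x, 1.0)         -> x
     pow (x, -1.0)        -> 1.0 / x
     pow (2.0, 3.0)       -> 8.0         (real_powi at compile time)
     pow (sqrt (x), y)    -> pow (x, y * 0.5)
     pow (pow (x, y), z)  -> pow (x, y * z) iff x is nonnegative,

   the last two requiring -funsafe-math-optimizations.  */
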
8520 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8521 Return NULL_TREE if no simplification can be made. */
8522 static tree
8523 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8524 tree arg0, tree arg1, tree type)
8525 {
8526 if (!validate_arg (arg0, REAL_TYPE)
8527 || !validate_arg (arg1, INTEGER_TYPE))
8528 return NULL_TREE;
8529
8530 /* Optimize pow(1.0,y) = 1.0. */
8531 if (real_onep (arg0))
8532 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8533
8534 if (tree_fits_shwi_p (arg1))
8535 {
8536 HOST_WIDE_INT c = tree_to_shwi (arg1);
8537
8538 /* Evaluate powi at compile-time. */
8539 if (TREE_CODE (arg0) == REAL_CST
8540 && !TREE_OVERFLOW (arg0))
8541 {
8542 REAL_VALUE_TYPE x;
8543 x = TREE_REAL_CST (arg0);
8544 real_powi (&x, TYPE_MODE (type), &x, c);
8545 return build_real (type, x);
8546 }
8547
8548 /* Optimize pow(x,0) = 1.0. */
8549 if (c == 0)
8550 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8551 arg0);
8552
8553 /* Optimize pow(x,1) = x. */
8554 if (c == 1)
8555 return arg0;
8556
8557 /* Optimize pow(x,-1) = 1.0/x. */
8558 if (c == -1)
8559 return fold_build2_loc (loc, RDIV_EXPR, type,
8560 build_real (type, dconst1), arg0);
8561 }
8562
8563 return NULL_TREE;
8564 }
8565
8566 /* A subroutine of fold_builtin to fold the various exponent
8567 functions. Return NULL_TREE if no simplification can be made.
8568 FUNC is the corresponding MPFR exponent function. */
8569
8570 static tree
8571 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8572 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8573 {
8574 if (validate_arg (arg, REAL_TYPE))
8575 {
8576 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8577 tree res;
8578
8579 /* Calculate the result when the argument is a constant. */
8580 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8581 return res;
8582
8583 /* Optimize expN(logN(x)) = x. */
8584 if (flag_unsafe_math_optimizations)
8585 {
8586 const enum built_in_function fcode = builtin_mathfn_code (arg);
8587
8588 if ((func == mpfr_exp
8589 && (fcode == BUILT_IN_LOG
8590 || fcode == BUILT_IN_LOGF
8591 || fcode == BUILT_IN_LOGL))
8592 || (func == mpfr_exp2
8593 && (fcode == BUILT_IN_LOG2
8594 || fcode == BUILT_IN_LOG2F
8595 || fcode == BUILT_IN_LOG2L))
8596 || (func == mpfr_exp10
8597 && (fcode == BUILT_IN_LOG10
8598 || fcode == BUILT_IN_LOG10F
8599 || fcode == BUILT_IN_LOG10L)))
8600 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8601 }
8602 }
8603
8604 return NULL_TREE;
8605 }
8606
8607 /* Return true if VAR is a VAR_DECL or a component thereof. */
8608
8609 static bool
8610 var_decl_component_p (tree var)
8611 {
8612 tree inner = var;
8613 while (handled_component_p (inner))
8614 inner = TREE_OPERAND (inner, 0);
8615 return SSA_VAR_P (inner);
8616 }
8617
8618 /* Fold function call to builtin memset. Return
8619 NULL_TREE if no simplification can be made. */
8620
8621 static tree
8622 fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
8623 tree type, bool ignore)
8624 {
8625 tree var, ret, etype;
8626 unsigned HOST_WIDE_INT length, cval;
8627
8628 if (! validate_arg (dest, POINTER_TYPE)
8629 || ! validate_arg (c, INTEGER_TYPE)
8630 || ! validate_arg (len, INTEGER_TYPE))
8631 return NULL_TREE;
8632
8633 if (! tree_fits_uhwi_p (len))
8634 return NULL_TREE;
8635
8636 /* If the LEN parameter is zero, return DEST. */
8637 if (integer_zerop (len))
8638 return omit_one_operand_loc (loc, type, dest, c);
8639
8640 if (TREE_CODE (c) != INTEGER_CST || TREE_SIDE_EFFECTS (dest))
8641 return NULL_TREE;
8642
8643 var = dest;
8644 STRIP_NOPS (var);
8645 if (TREE_CODE (var) != ADDR_EXPR)
8646 return NULL_TREE;
8647
8648 var = TREE_OPERAND (var, 0);
8649 if (TREE_THIS_VOLATILE (var))
8650 return NULL_TREE;
8651
8652 etype = TREE_TYPE (var);
8653 if (TREE_CODE (etype) == ARRAY_TYPE)
8654 etype = TREE_TYPE (etype);
8655
8656 if (!INTEGRAL_TYPE_P (etype)
8657 && !POINTER_TYPE_P (etype))
8658 return NULL_TREE;
8659
8660 if (! var_decl_component_p (var))
8661 return NULL_TREE;
8662
8663 length = tree_to_uhwi (len);
8664 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
8665 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
8666 return NULL_TREE;
8667
8668 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8669 return NULL_TREE;
8670
8671 if (integer_zerop (c))
8672 cval = 0;
8673 else
8674 {
8675 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8676 return NULL_TREE;
8677
8678 cval = TREE_INT_CST_LOW (c);
8679 cval &= 0xff;
8680 cval |= cval << 8;
8681 cval |= cval << 16;
8682 cval |= (cval << 31) << 1;
8683 }
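      /* Illustrative sketch, assuming 8-bit bytes and a 64-bit
         HOST_WIDE_INT: for c == 0xab the replication above yields
           0xab -> 0xabab -> 0xabababab -> 0xabababababababab.
         The final shift is split as (cval << 31) << 1 instead of
         cval << 32 so that it remains well defined when
         HOST_WIDE_INT is only 32 bits wide. */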
8684
8685 ret = build_int_cst_type (etype, cval);
8686 var = build_fold_indirect_ref_loc (loc,
8687 fold_convert_loc (loc,
8688 build_pointer_type (etype),
8689 dest));
8690 ret = build2 (MODIFY_EXPR, etype, var, ret);
8691 if (ignore)
8692 return ret;
8693
8694 return omit_one_operand_loc (loc, type, dest, ret);
8695 }
8696
8697 /* Fold function call to builtin bzero. Return
8698 NULL_TREE if no simplification can be made. */
8699
8700 static tree
8701 fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
8702 {
8703 if (! validate_arg (dest, POINTER_TYPE)
8704 || ! validate_arg (size, INTEGER_TYPE))
8705 return NULL_TREE;
8706
8707 if (!ignore)
8708 return NULL_TREE;
8709
8710 /* New argument list transforming bzero(ptr x, int y) to
8711 memset(ptr x, int 0, size_t y). This is done this way
8712 so that if it isn't expanded inline, we fall back to
8713 calling bzero instead of memset. */
8714
8715 return fold_builtin_memset (loc, dest, integer_zero_node,
8716 fold_convert_loc (loc, size_type_node, size),
8717 void_type_node, ignore);
8718 }
8719
8720 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8721 NULL_TREE if no simplification can be made.
8722 If ENDP is 0, return DEST (like memcpy).
8723 If ENDP is 1, return DEST+LEN (like mempcpy).
8724 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8725 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8726 (memmove). */
8727
8728 static tree
8729 fold_builtin_memory_op (location_t loc, tree dest, tree src,
8730 tree len, tree type, bool ignore, int endp)
8731 {
8732 tree destvar, srcvar, expr;
8733
8734 if (! validate_arg (dest, POINTER_TYPE)
8735 || ! validate_arg (src, POINTER_TYPE)
8736 || ! validate_arg (len, INTEGER_TYPE))
8737 return NULL_TREE;
8738
8739 /* If the LEN parameter is zero, return DEST. */
8740 if (integer_zerop (len))
8741 return omit_one_operand_loc (loc, type, dest, src);
8742
8743 /* If SRC and DEST are the same (and not volatile), return
8744 DEST{,+LEN,+LEN-1}. */
8745 if (operand_equal_p (src, dest, 0))
8746 expr = len;
8747 else
8748 {
8749 tree srctype, desttype;
8750 unsigned int src_align, dest_align;
8751 tree off0;
8752
8753 if (endp == 3)
8754 {
8755 src_align = get_pointer_alignment (src);
8756 dest_align = get_pointer_alignment (dest);
8757
8758 /* Both DEST and SRC must be pointer types.
8759 ??? This is what old code did. Is the testing for pointer types
8760 really mandatory?
8761
8762 If SRC is readonly, or LEN does not exceed the smaller of the two alignments in bytes (so the regions either coincide exactly or cannot overlap), we can use memcpy. */
8763 if (!dest_align || !src_align)
8764 return NULL_TREE;
8765 if (readonly_data_expr (src)
8766 || (tree_fits_uhwi_p (len)
8767 && (MIN (src_align, dest_align) / BITS_PER_UNIT
8768 >= tree_to_uhwi (len))))
8769 {
8770 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8771 if (!fn)
8772 return NULL_TREE;
8773 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8774 }
8775
8776 /* If *src and *dest can't overlap, optimize into memcpy as well. */
8777 if (TREE_CODE (src) == ADDR_EXPR
8778 && TREE_CODE (dest) == ADDR_EXPR)
8779 {
8780 tree src_base, dest_base, fn;
8781 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
8782 HOST_WIDE_INT size = -1;
8783 HOST_WIDE_INT maxsize = -1;
8784
8785 srcvar = TREE_OPERAND (src, 0);
8786 src_base = get_ref_base_and_extent (srcvar, &src_offset,
8787 &size, &maxsize);
8788 destvar = TREE_OPERAND (dest, 0);
8789 dest_base = get_ref_base_and_extent (destvar, &dest_offset,
8790 &size, &maxsize);
8791 if (tree_fits_uhwi_p (len))
8792 maxsize = tree_to_uhwi (len);
8793 else
8794 maxsize = -1;
8795 src_offset /= BITS_PER_UNIT;
8796 dest_offset /= BITS_PER_UNIT;
8797 if (SSA_VAR_P (src_base)
8798 && SSA_VAR_P (dest_base))
8799 {
8800 if (operand_equal_p (src_base, dest_base, 0)
8801 && ranges_overlap_p (src_offset, maxsize,
8802 dest_offset, maxsize))
8803 return NULL_TREE;
8804 }
8805 else if (TREE_CODE (src_base) == MEM_REF
8806 && TREE_CODE (dest_base) == MEM_REF)
8807 {
8808 double_int off;
8809 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
8810 TREE_OPERAND (dest_base, 0), 0))
8811 return NULL_TREE;
8812 off = mem_ref_offset (src_base) +
8813 double_int::from_shwi (src_offset);
8814 if (!off.fits_shwi ())
8815 return NULL_TREE;
8816 src_offset = off.low;
8817 off = mem_ref_offset (dest_base) +
8818 double_int::from_shwi (dest_offset);
8819 if (!off.fits_shwi ())
8820 return NULL_TREE;
8821 dest_offset = off.low;
8822 if (ranges_overlap_p (src_offset, maxsize,
8823 dest_offset, maxsize))
8824 return NULL_TREE;
8825 }
8826 else
8827 return NULL_TREE;
8828
8829 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8830 if (!fn)
8831 return NULL_TREE;
8832 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8833 }
8834
8835 /* If the destination and source do not alias optimize into
8836 memcpy as well. */
8837 if ((is_gimple_min_invariant (dest)
8838 || TREE_CODE (dest) == SSA_NAME)
8839 && (is_gimple_min_invariant (src)
8840 || TREE_CODE (src) == SSA_NAME))
8841 {
8842 ao_ref destr, srcr;
8843 ao_ref_init_from_ptr_and_size (&destr, dest, len);
8844 ao_ref_init_from_ptr_and_size (&srcr, src, len);
8845 if (!refs_may_alias_p_1 (&destr, &srcr, false))
8846 {
8847 tree fn;
8848 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8849 if (!fn)
8850 return NULL_TREE;
8851 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8852 }
8853 }
8854
8855 return NULL_TREE;
8856 }
8857
8858 if (!tree_fits_shwi_p (len))
8859 return NULL_TREE;
8860 /* FIXME:
8861 This logic loses for arguments like (type *)malloc (sizeof (type)),
8862 since we strip the casts down to the VOID return value of malloc.
8863 Perhaps we ought to inherit the type from the non-VOID argument here? */
8864 STRIP_NOPS (src);
8865 STRIP_NOPS (dest);
8866 if (!POINTER_TYPE_P (TREE_TYPE (src))
8867 || !POINTER_TYPE_P (TREE_TYPE (dest)))
8868 return NULL_TREE;
8869 /* In the following, try to find a type that is most natural to be
8870 used for the memcpy source and destination and that allows
8871 the most optimization when memcpy is turned into a plain assignment
8872 using that type. In theory we could always use a char[len] type,
8873 but that would only gain us that the destination and source
8874 possibly no longer have their address taken. */
8875 /* As we fold (void *)(p + CST) to (void *)p + CST, undo this here. */
8876 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
8877 {
8878 tree tem = TREE_OPERAND (src, 0);
8879 STRIP_NOPS (tem);
8880 if (tem != TREE_OPERAND (src, 0))
8881 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
8882 }
8883 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
8884 {
8885 tree tem = TREE_OPERAND (dest, 0);
8886 STRIP_NOPS (tem);
8887 if (tem != TREE_OPERAND (dest, 0))
8888 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
8889 }
8890 srctype = TREE_TYPE (TREE_TYPE (src));
8891 if (TREE_CODE (srctype) == ARRAY_TYPE
8892 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8893 {
8894 srctype = TREE_TYPE (srctype);
8895 STRIP_NOPS (src);
8896 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
8897 }
8898 desttype = TREE_TYPE (TREE_TYPE (dest));
8899 if (TREE_CODE (desttype) == ARRAY_TYPE
8900 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8901 {
8902 desttype = TREE_TYPE (desttype);
8903 STRIP_NOPS (dest);
8904 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
8905 }
8906 if (TREE_ADDRESSABLE (srctype)
8907 || TREE_ADDRESSABLE (desttype))
8908 return NULL_TREE;
8909
8910 /* Make sure we are not copying using a floating-point mode or
8911 a type whose size possibly does not match its precision. */
8912 if (FLOAT_MODE_P (TYPE_MODE (desttype))
8913 || TREE_CODE (desttype) == BOOLEAN_TYPE
8914 || TREE_CODE (desttype) == ENUMERAL_TYPE)
8915 {
8916 /* A more suitable int_mode_for_mode would return a vector
8917 integer mode for a vector float mode or an integer complex
8918 mode for a float complex mode if there isn't a regular
8919 integer mode covering the mode of desttype. */
8920 enum machine_mode mode = int_mode_for_mode (TYPE_MODE (desttype));
8921 if (mode == BLKmode)
8922 desttype = NULL_TREE;
8923 else
8924 desttype = build_nonstandard_integer_type (GET_MODE_BITSIZE (mode),
8925 1);
8926 }
8927 if (FLOAT_MODE_P (TYPE_MODE (srctype))
8928 || TREE_CODE (srctype) == BOOLEAN_TYPE
8929 || TREE_CODE (srctype) == ENUMERAL_TYPE)
8930 {
8931 enum machine_mode mode = int_mode_for_mode (TYPE_MODE (srctype));
8932 if (mode == BLKmode)
8933 srctype = NULL_TREE;
8934 else
8935 srctype = build_nonstandard_integer_type (GET_MODE_BITSIZE (mode),
8936 1);
8937 }
8938 if (!srctype)
8939 srctype = desttype;
8940 if (!desttype)
8941 desttype = srctype;
8942 if (!srctype)
8943 return NULL_TREE;
8944
8945 src_align = get_pointer_alignment (src);
8946 dest_align = get_pointer_alignment (dest);
8947 if (dest_align < TYPE_ALIGN (desttype)
8948 || src_align < TYPE_ALIGN (srctype))
8949 return NULL_TREE;
8950
8951 if (!ignore)
8952 dest = builtin_save_expr (dest);
8953
8954 /* Build accesses at offset zero with a ref-all character type. */
8955 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
8956 ptr_mode, true), 0);
8957
8958 destvar = dest;
8959 STRIP_NOPS (destvar);
8960 if (TREE_CODE (destvar) == ADDR_EXPR
8961 && var_decl_component_p (TREE_OPERAND (destvar, 0))
8962 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8963 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
8964 else
8965 destvar = NULL_TREE;
8966
8967 srcvar = src;
8968 STRIP_NOPS (srcvar);
8969 if (TREE_CODE (srcvar) == ADDR_EXPR
8970 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
8971 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8972 {
8973 if (!destvar
8974 || src_align >= TYPE_ALIGN (desttype))
8975 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
8976 srcvar, off0);
8977 else if (!STRICT_ALIGNMENT)
8978 {
8979 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
8980 src_align);
8981 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
8982 }
8983 else
8984 srcvar = NULL_TREE;
8985 }
8986 else
8987 srcvar = NULL_TREE;
8988
8989 if (srcvar == NULL_TREE && destvar == NULL_TREE)
8990 return NULL_TREE;
8991
8992 if (srcvar == NULL_TREE)
8993 {
8994 STRIP_NOPS (src);
8995 if (src_align >= TYPE_ALIGN (desttype))
8996 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
8997 else
8998 {
8999 if (STRICT_ALIGNMENT)
9000 return NULL_TREE;
9001 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
9002 src_align);
9003 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
9004 }
9005 }
9006 else if (destvar == NULL_TREE)
9007 {
9008 STRIP_NOPS (dest);
9009 if (dest_align >= TYPE_ALIGN (srctype))
9010 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
9011 else
9012 {
9013 if (STRICT_ALIGNMENT)
9014 return NULL_TREE;
9015 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
9016 dest_align);
9017 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
9018 }
9019 }
9020
9021 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, srcvar);
9022 }
9023
9024 if (ignore)
9025 return expr;
9026
9027 if (endp == 0 || endp == 3)
9028 return omit_one_operand_loc (loc, type, dest, expr);
9029
9030 if (expr == len)
9031 expr = NULL_TREE;
9032
9033 if (endp == 2)
9034 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
9035 ssize_int (1));
9036
9037 dest = fold_build_pointer_plus_loc (loc, dest, len);
9038 dest = fold_convert_loc (loc, type, dest);
9039 if (expr)
9040 dest = omit_one_operand_loc (loc, type, dest, expr);
9041 return dest;
9042 }
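/* Illustrative sketch of the endp == 0 case, assuming a 4-byte int:
     int a, b;
     memcpy (&a, &b, sizeof (int));
   is folded to a plain assignment between two MEM_REFs of type int at
   offset zero, roughly *(int *) &a = *(int *) &b; if the value of the
   call is used, DEST is returned as the overall result. */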
9043
9044 /* Fold function call to builtin strcpy with arguments DEST and SRC.
9045 If LEN is not NULL, it represents the length of the string to be
9046 copied. Return NULL_TREE if no simplification can be made. */
9047
9048 tree
9049 fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
9050 {
9051 tree fn;
9052
9053 if (!validate_arg (dest, POINTER_TYPE)
9054 || !validate_arg (src, POINTER_TYPE))
9055 return NULL_TREE;
9056
9057 /* If SRC and DEST are the same (and not volatile), return DEST. */
9058 if (operand_equal_p (src, dest, 0))
9059 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
9060
9061 if (optimize_function_for_size_p (cfun))
9062 return NULL_TREE;
9063
9064 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
9065 if (!fn)
9066 return NULL_TREE;
9067
9068 if (!len)
9069 {
9070 len = c_strlen (src, 1);
9071 if (! len || TREE_SIDE_EFFECTS (len))
9072 return NULL_TREE;
9073 }
9074
9075 len = fold_convert_loc (loc, size_type_node, len);
9076 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
9077 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
9078 build_call_expr_loc (loc, fn, 3, dest, src, len));
9079 }
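/* Illustrative sketch: when SRC is a constant string and we are not
   optimizing for size, strcpy (d, "hi") is folded to
   memcpy (d, "hi", 3), where the length 3 includes the terminating
   NUL. */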
9080
9081 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
9082 Return NULL_TREE if no simplification can be made. */
9083
9084 static tree
9085 fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
9086 {
9087 tree fn, len, lenp1, call, type;
9088
9089 if (!validate_arg (dest, POINTER_TYPE)
9090 || !validate_arg (src, POINTER_TYPE))
9091 return NULL_TREE;
9092
9093 len = c_strlen (src, 1);
9094 if (!len
9095 || TREE_CODE (len) != INTEGER_CST)
9096 return NULL_TREE;
9097
9098 if (optimize_function_for_size_p (cfun)
9099 /* If length is zero it's small enough. */
9100 && !integer_zerop (len))
9101 return NULL_TREE;
9102
9103 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
9104 if (!fn)
9105 return NULL_TREE;
9106
9107 lenp1 = size_binop_loc (loc, PLUS_EXPR,
9108 fold_convert_loc (loc, size_type_node, len),
9109 build_int_cst (size_type_node, 1));
9110 /* We use dest twice in building our expression. Save it from
9111 multiple expansions. */
9112 dest = builtin_save_expr (dest);
9113 call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);
9114
9115 type = TREE_TYPE (TREE_TYPE (fndecl));
9116 dest = fold_build_pointer_plus_loc (loc, dest, len);
9117 dest = fold_convert_loc (loc, type, dest);
9118 dest = omit_one_operand_loc (loc, type, dest, call);
9119 return dest;
9120 }
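/* Illustrative sketch: for a constant source, stpcpy (d, "hi")
   becomes (memcpy (d, "hi", 3), d + 2), since stpcpy returns a
   pointer to the terminating NUL rather than to DEST. */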
9121
9122 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
9123 If SLEN is not NULL, it represents the length of the source string.
9124 Return NULL_TREE if no simplification can be made. */
9125
9126 tree
9127 fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
9128 tree src, tree len, tree slen)
9129 {
9130 tree fn;
9131
9132 if (!validate_arg (dest, POINTER_TYPE)
9133 || !validate_arg (src, POINTER_TYPE)
9134 || !validate_arg (len, INTEGER_TYPE))
9135 return NULL_TREE;
9136
9137 /* If the LEN parameter is zero, return DEST. */
9138 if (integer_zerop (len))
9139 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
9140
9141 /* We can't compare slen with len as constants below if len is not a
9142 constant. */
9143 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
9144 return NULL_TREE;
9145
9146 if (!slen)
9147 slen = c_strlen (src, 1);
9148
9149 /* Now SRC must point to a constant string whose length is known. */
9150 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
9151 return NULL_TREE;
9152
9153 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
9154
9155 /* We do not support simplification of this case, though we do
9156 support it when expanding trees into RTL. */
9157 /* FIXME: generate a call to __builtin_memset. */
9158 if (tree_int_cst_lt (slen, len))
9159 return NULL_TREE;
9160
9161 /* OK transform into builtin memcpy. */
9162 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
9163 if (!fn)
9164 return NULL_TREE;
9165
9166 len = fold_convert_loc (loc, size_type_node, len);
9167 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
9168 build_call_expr_loc (loc, fn, 3, dest, src, len));
9169 }
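/* Illustrative sketch: strncpy (d, "ab", 3) has slen + 1 == 3 == LEN
   and is folded to memcpy (d, "ab", 3), while strncpy (d, "ab", 5) is
   left alone because strncpy would also have to zero-pad DEST up to
   LEN bytes. */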
9170
9171 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
9172 arguments to the call, and TYPE is its return type.
9173 Return NULL_TREE if no simplification can be made. */
9174
9175 static tree
9176 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
9177 {
9178 if (!validate_arg (arg1, POINTER_TYPE)
9179 || !validate_arg (arg2, INTEGER_TYPE)
9180 || !validate_arg (len, INTEGER_TYPE))
9181 return NULL_TREE;
9182 else
9183 {
9184 const char *p1;
9185
9186 if (TREE_CODE (arg2) != INTEGER_CST
9187 || !tree_fits_uhwi_p (len))
9188 return NULL_TREE;
9189
9190 p1 = c_getstr (arg1);
9191 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
9192 {
9193 char c;
9194 const char *r;
9195 tree tem;
9196
9197 if (target_char_cast (arg2, &c))
9198 return NULL_TREE;
9199
9200 r = (const char *) memchr (p1, c, tree_to_uhwi (len));
9201
9202 if (r == NULL)
9203 return build_int_cst (TREE_TYPE (arg1), 0);
9204
9205 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
9206 return fold_convert_loc (loc, type, tem);
9207 }
9208 return NULL_TREE;
9209 }
9210 }
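/* Illustrative sketch: memchr ("hello", 'l', 5) folds to ARG1 + 2,
   the address of the first 'l', while memchr ("hello", 'z', 5) folds
   to a null pointer constant. */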
9211
9212 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
9213 Return NULL_TREE if no simplification can be made. */
9214
9215 static tree
9216 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
9217 {
9218 const char *p1, *p2;
9219
9220 if (!validate_arg (arg1, POINTER_TYPE)
9221 || !validate_arg (arg2, POINTER_TYPE)
9222 || !validate_arg (len, INTEGER_TYPE))
9223 return NULL_TREE;
9224
9225 /* If the LEN parameter is zero, return zero. */
9226 if (integer_zerop (len))
9227 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
9228 arg1, arg2);
9229
9230 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9231 if (operand_equal_p (arg1, arg2, 0))
9232 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
9233
9234 p1 = c_getstr (arg1);
9235 p2 = c_getstr (arg2);
9236
9237 /* If all arguments are constant, and the value of len is not greater
9238 than the lengths of arg1 and arg2, evaluate at compile-time. */
9239 if (tree_fits_uhwi_p (len) && p1 && p2
9240 && compare_tree_int (len, strlen (p1) + 1) <= 0
9241 && compare_tree_int (len, strlen (p2) + 1) <= 0)
9242 {
9243 const int r = memcmp (p1, p2, tree_to_uhwi (len));
9244
9245 if (r > 0)
9246 return integer_one_node;
9247 else if (r < 0)
9248 return integer_minus_one_node;
9249 else
9250 return integer_zero_node;
9251 }
9252
9253 /* If the LEN parameter is one, return an expression corresponding to
9254 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
9255 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
9256 {
9257 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9258 tree cst_uchar_ptr_node
9259 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9260
9261 tree ind1
9262 = fold_convert_loc (loc, integer_type_node,
9263 build1 (INDIRECT_REF, cst_uchar_node,
9264 fold_convert_loc (loc,
9265 cst_uchar_ptr_node,
9266 arg1)));
9267 tree ind2
9268 = fold_convert_loc (loc, integer_type_node,
9269 build1 (INDIRECT_REF, cst_uchar_node,
9270 fold_convert_loc (loc,
9271 cst_uchar_ptr_node,
9272 arg2)));
9273 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9274 }
9275
9276 return NULL_TREE;
9277 }
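/* Illustrative sketch: memcmp ("abc", "abd", 3) folds to -1 at
   compile time (the result is normalized to -1, 0 or 1 regardless of
   the host memcmp return value), and memcmp (p, q, 1) becomes the
   single byte difference
     (int) *(const unsigned char *) p - (int) *(const unsigned char *) q. */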
9278
9279 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
9280 Return NULL_TREE if no simplification can be made. */
9281
9282 static tree
9283 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
9284 {
9285 const char *p1, *p2;
9286
9287 if (!validate_arg (arg1, POINTER_TYPE)
9288 || !validate_arg (arg2, POINTER_TYPE))
9289 return NULL_TREE;
9290
9291 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9292 if (operand_equal_p (arg1, arg2, 0))
9293 return integer_zero_node;
9294
9295 p1 = c_getstr (arg1);
9296 p2 = c_getstr (arg2);
9297
9298 if (p1 && p2)
9299 {
9300 const int i = strcmp (p1, p2);
9301 if (i < 0)
9302 return integer_minus_one_node;
9303 else if (i > 0)
9304 return integer_one_node;
9305 else
9306 return integer_zero_node;
9307 }
9308
9309 /* If the second arg is "", return *(const unsigned char*)arg1. */
9310 if (p2 && *p2 == '\0')
9311 {
9312 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9313 tree cst_uchar_ptr_node
9314 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9315
9316 return fold_convert_loc (loc, integer_type_node,
9317 build1 (INDIRECT_REF, cst_uchar_node,
9318 fold_convert_loc (loc,
9319 cst_uchar_ptr_node,
9320 arg1)));
9321 }
9322
9323 /* If the first arg is "", return -*(const unsigned char*)arg2. */
9324 if (p1 && *p1 == '\0')
9325 {
9326 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9327 tree cst_uchar_ptr_node
9328 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9329
9330 tree temp
9331 = fold_convert_loc (loc, integer_type_node,
9332 build1 (INDIRECT_REF, cst_uchar_node,
9333 fold_convert_loc (loc,
9334 cst_uchar_ptr_node,
9335 arg2)));
9336 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
9337 }
9338
9339 return NULL_TREE;
9340 }
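/* Illustrative sketch: strcmp (s, "") reduces to
   *(const unsigned char *) s, strcmp ("", s) to its negation, and
   fully constant cases such as strcmp ("a", "b") fold to -1, 0
   or 1. */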
9341
9342 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
9343 Return NULL_TREE if no simplification can be made. */
9344
9345 static tree
9346 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
9347 {
9348 const char *p1, *p2;
9349
9350 if (!validate_arg (arg1, POINTER_TYPE)
9351 || !validate_arg (arg2, POINTER_TYPE)
9352 || !validate_arg (len, INTEGER_TYPE))
9353 return NULL_TREE;
9354
9355 /* If the LEN parameter is zero, return zero. */
9356 if (integer_zerop (len))
9357 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
9358 arg1, arg2);
9359
9360 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9361 if (operand_equal_p (arg1, arg2, 0))
9362 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
9363
9364 p1 = c_getstr (arg1);
9365 p2 = c_getstr (arg2);
9366
9367 if (tree_fits_uhwi_p (len) && p1 && p2)
9368 {
9369 const int i = strncmp (p1, p2, tree_to_uhwi (len));
9370 if (i > 0)
9371 return integer_one_node;
9372 else if (i < 0)
9373 return integer_minus_one_node;
9374 else
9375 return integer_zero_node;
9376 }
9377
9378 /* If the second arg is "", and the length is greater than zero,
9379 return *(const unsigned char*)arg1. */
9380 if (p2 && *p2 == '\0'
9381 && TREE_CODE (len) == INTEGER_CST
9382 && tree_int_cst_sgn (len) == 1)
9383 {
9384 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9385 tree cst_uchar_ptr_node
9386 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9387
9388 return fold_convert_loc (loc, integer_type_node,
9389 build1 (INDIRECT_REF, cst_uchar_node,
9390 fold_convert_loc (loc,
9391 cst_uchar_ptr_node,
9392 arg1)));
9393 }
9394
9395 /* If the first arg is "", and the length is greater than zero,
9396 return -*(const unsigned char*)arg2. */
9397 if (p1 && *p1 == '\0'
9398 && TREE_CODE (len) == INTEGER_CST
9399 && tree_int_cst_sgn (len) == 1)
9400 {
9401 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9402 tree cst_uchar_ptr_node
9403 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9404
9405 tree temp = fold_convert_loc (loc, integer_type_node,
9406 build1 (INDIRECT_REF, cst_uchar_node,
9407 fold_convert_loc (loc,
9408 cst_uchar_ptr_node,
9409 arg2)));
9410 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
9411 }
9412
9413 /* If the LEN parameter is one, return an expression corresponding to
9414 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
9415 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
9416 {
9417 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9418 tree cst_uchar_ptr_node
9419 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9420
9421 tree ind1 = fold_convert_loc (loc, integer_type_node,
9422 build1 (INDIRECT_REF, cst_uchar_node,
9423 fold_convert_loc (loc,
9424 cst_uchar_ptr_node,
9425 arg1)));
9426 tree ind2 = fold_convert_loc (loc, integer_type_node,
9427 build1 (INDIRECT_REF, cst_uchar_node,
9428 fold_convert_loc (loc,
9429 cst_uchar_ptr_node,
9430 arg2)));
9431 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9432 }
9433
9434 return NULL_TREE;
9435 }
9436
9437 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9438 ARG. Return NULL_TREE if no simplification can be made. */
9439
9440 static tree
9441 fold_builtin_signbit (location_t loc, tree arg, tree type)
9442 {
9443 if (!validate_arg (arg, REAL_TYPE))
9444 return NULL_TREE;
9445
9446 /* If ARG is a compile-time constant, determine the result. */
9447 if (TREE_CODE (arg) == REAL_CST
9448 && !TREE_OVERFLOW (arg))
9449 {
9450 REAL_VALUE_TYPE c;
9451
9452 c = TREE_REAL_CST (arg);
9453 return (REAL_VALUE_NEGATIVE (c)
9454 ? build_one_cst (type)
9455 : build_zero_cst (type));
9456 }
9457
9458 /* If ARG is non-negative, the result is always zero. */
9459 if (tree_expr_nonnegative_p (arg))
9460 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9461
9462 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9463 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9464 return fold_convert (type,
9465 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
9466 build_real (TREE_TYPE (arg), dconst0)));
9467
9468 return NULL_TREE;
9469 }
9470
9471 /* Fold function call to builtin copysign, copysignf or copysignl with
9472 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9473 be made. */
9474
9475 static tree
9476 fold_builtin_copysign (location_t loc, tree fndecl,
9477 tree arg1, tree arg2, tree type)
9478 {
9479 tree tem;
9480
9481 if (!validate_arg (arg1, REAL_TYPE)
9482 || !validate_arg (arg2, REAL_TYPE))
9483 return NULL_TREE;
9484
9485 /* copysign(X,X) is X. */
9486 if (operand_equal_p (arg1, arg2, 0))
9487 return fold_convert_loc (loc, type, arg1);
9488
9489 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9490 if (TREE_CODE (arg1) == REAL_CST
9491 && TREE_CODE (arg2) == REAL_CST
9492 && !TREE_OVERFLOW (arg1)
9493 && !TREE_OVERFLOW (arg2))
9494 {
9495 REAL_VALUE_TYPE c1, c2;
9496
9497 c1 = TREE_REAL_CST (arg1);
9498 c2 = TREE_REAL_CST (arg2);
9499 /* c1.sign := c2.sign. */
9500 real_copysign (&c1, &c2);
9501 return build_real (type, c1);
9502 }
9503
9504 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9505 Remember to evaluate Y for side-effects. */
9506 if (tree_expr_nonnegative_p (arg2))
9507 return omit_one_operand_loc (loc, type,
9508 fold_build1_loc (loc, ABS_EXPR, type, arg1),
9509 arg2);
9510
9511 /* Strip sign changing operations for the first argument. */
9512 tem = fold_strip_sign_ops (arg1);
9513 if (tem)
9514 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
9515
9516 return NULL_TREE;
9517 }
9518
9519 /* Fold a call to builtin isascii with argument ARG. */
9520
9521 static tree
9522 fold_builtin_isascii (location_t loc, tree arg)
9523 {
9524 if (!validate_arg (arg, INTEGER_TYPE))
9525 return NULL_TREE;
9526 else
9527 {
9528 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9529 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9530 build_int_cst (integer_type_node,
9531 ~ (unsigned HOST_WIDE_INT) 0x7f));
9532 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9533 arg, integer_zero_node);
9534 }
9535 }
9536
9537 /* Fold a call to builtin toascii with argument ARG. */
9538
9539 static tree
9540 fold_builtin_toascii (location_t loc, tree arg)
9541 {
9542 if (!validate_arg (arg, INTEGER_TYPE))
9543 return NULL_TREE;
9544
9545 /* Transform toascii(c) -> (c & 0x7f). */
9546 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9547 build_int_cst (integer_type_node, 0x7f));
9548 }
9549
9550 /* Fold a call to builtin isdigit with argument ARG. */
9551
9552 static tree
9553 fold_builtin_isdigit (location_t loc, tree arg)
9554 {
9555 if (!validate_arg (arg, INTEGER_TYPE))
9556 return NULL_TREE;
9557 else
9558 {
9559 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9560 /* According to the C standard, isdigit is unaffected by locale.
9561 However, it definitely is affected by the target character set. */
9562 unsigned HOST_WIDE_INT target_digit0
9563 = lang_hooks.to_target_charset ('0');
9564
9565 if (target_digit0 == 0)
9566 return NULL_TREE;
9567
9568 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9569 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9570 build_int_cst (unsigned_type_node, target_digit0));
9571 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9572 build_int_cst (unsigned_type_node, 9));
9573 }
9574 }
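   /* Why the single comparison works (illustrative): after the
      conversion to unsigned, any C below '0' wraps around to a very
      large value, so (unsigned) c - '0' <= 9 tests both '0' <= c and
      c <= '9' at once. */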
9575
9576 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9577
9578 static tree
9579 fold_builtin_fabs (location_t loc, tree arg, tree type)
9580 {
9581 if (!validate_arg (arg, REAL_TYPE))
9582 return NULL_TREE;
9583
9584 arg = fold_convert_loc (loc, type, arg);
9585 if (TREE_CODE (arg) == REAL_CST)
9586 return fold_abs_const (arg, type);
9587 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9588 }
9589
9590 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9591
9592 static tree
9593 fold_builtin_abs (location_t loc, tree arg, tree type)
9594 {
9595 if (!validate_arg (arg, INTEGER_TYPE))
9596 return NULL_TREE;
9597
9598 arg = fold_convert_loc (loc, type, arg);
9599 if (TREE_CODE (arg) == INTEGER_CST)
9600 return fold_abs_const (arg, type);
9601 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9602 }
9603
9604 /* Fold a fma operation with arguments ARG[012]. */
9605
9606 tree
9607 fold_fma (location_t loc ATTRIBUTE_UNUSED,
9608 tree type, tree arg0, tree arg1, tree arg2)
9609 {
9610 if (TREE_CODE (arg0) == REAL_CST
9611 && TREE_CODE (arg1) == REAL_CST
9612 && TREE_CODE (arg2) == REAL_CST)
9613 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
9614
9615 return NULL_TREE;
9616 }
9617
9618 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
9619
9620 static tree
9621 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
9622 {
9623 if (validate_arg (arg0, REAL_TYPE)
9624 && validate_arg (arg1, REAL_TYPE)
9625 && validate_arg (arg2, REAL_TYPE))
9626 {
9627 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
9628 if (tem)
9629 return tem;
9630
9631 /* ??? Only expand to FMA_EXPR if it's directly supported. */
9632 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
9633 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
9634 }
9635 return NULL_TREE;
9636 }
9637
9638 /* Fold a call to builtin fmin or fmax. */
9639
9640 static tree
9641 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
9642 tree type, bool max)
9643 {
9644 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9645 {
9646 /* Calculate the result when the argument is a constant. */
9647 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9648
9649 if (res)
9650 return res;
9651
9652 /* If either argument is NaN, return the other one. Avoid the
9653 transformation if we get (and honor) a signalling NaN. Using
9654 omit_one_operand() ensures we create a non-lvalue. */
9655 if (TREE_CODE (arg0) == REAL_CST
9656 && real_isnan (&TREE_REAL_CST (arg0))
9657 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9658 || ! TREE_REAL_CST (arg0).signalling))
9659 return omit_one_operand_loc (loc, type, arg1, arg0);
9660 if (TREE_CODE (arg1) == REAL_CST
9661 && real_isnan (&TREE_REAL_CST (arg1))
9662 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9663 || ! TREE_REAL_CST (arg1).signalling))
9664 return omit_one_operand_loc (loc, type, arg0, arg1);
9665
9666 /* Transform fmin/fmax(x,x) -> x. */
9667 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9668 return omit_one_operand_loc (loc, type, arg0, arg1);
9669
9670 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9671 functions to return the numeric arg if the other one is NaN.
9672 These tree codes don't honor that, so only transform if
9673 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9674 handled, so we don't have to worry about it either. */
9675 if (flag_finite_math_only)
9676 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9677 fold_convert_loc (loc, type, arg0),
9678 fold_convert_loc (loc, type, arg1));
9679 }
9680 return NULL_TREE;
9681 }
9682
9683 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9684
9685 static tree
9686 fold_builtin_carg (location_t loc, tree arg, tree type)
9687 {
9688 if (validate_arg (arg, COMPLEX_TYPE)
9689 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9690 {
9691 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9692
9693 if (atan2_fn)
9694 {
9695 tree new_arg = builtin_save_expr (arg);
9696 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9697 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9698 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9699 }
9700 }
9701
9702 return NULL_TREE;
9703 }
9704
9705 /* Fold a call to builtin logb/ilogb. */
9706
9707 static tree
9708 fold_builtin_logb (location_t loc, tree arg, tree rettype)
9709 {
9710 if (! validate_arg (arg, REAL_TYPE))
9711 return NULL_TREE;
9712
9713 STRIP_NOPS (arg);
9714
9715 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9716 {
9717 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9718
9719 switch (value->cl)
9720 {
9721 case rvc_nan:
9722 case rvc_inf:
9723 /* If arg is Inf or NaN and we're logb, return it. */
9724 if (TREE_CODE (rettype) == REAL_TYPE)
9725 {
9726 /* For logb(-Inf) we have to return +Inf. */
9727 if (real_isinf (value) && real_isneg (value))
9728 {
9729 REAL_VALUE_TYPE tem;
9730 real_inf (&tem);
9731 return build_real (rettype, tem);
9732 }
9733 return fold_convert_loc (loc, rettype, arg);
9734 }
9735 /* Fall through... */
9736 case rvc_zero:
9737 /* For logb, zero may set errno and/or raise an exception; for
9738 ilogb we don't know the target's FP_ILOGB0 value. */
9739 return NULL_TREE;
9740 case rvc_normal:
9741 /* For normal numbers, proceed iff radix == 2. In GCC,
9742 normalized significands are in the range [0.5, 1.0). We
9743 want the exponent as if they were [1.0, 2.0) so get the
9744 exponent and subtract 1. */
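	  /* Worked example (illustrative): 8.0 is stored as
	     0.5 * 2**4, so REAL_EXP is 4 and logb (8.0) folds to
	     4 - 1 == 3.0. */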
9745 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9746 return fold_convert_loc (loc, rettype,
9747 build_int_cst (integer_type_node,
9748 REAL_EXP (value)-1));
9749 break;
9750 }
9751 }
9752
9753 return NULL_TREE;
9754 }
9755
9756 /* Fold a call to builtin significand, if radix == 2. */
9757
9758 static tree
9759 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9760 {
9761 if (! validate_arg (arg, REAL_TYPE))
9762 return NULL_TREE;
9763
9764 STRIP_NOPS (arg);
9765
9766 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9767 {
9768 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9769
9770 switch (value->cl)
9771 {
9772 case rvc_zero:
9773 case rvc_nan:
9774 case rvc_inf:
9775 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9776 return fold_convert_loc (loc, rettype, arg);
9777 case rvc_normal:
9778 /* For normal numbers, proceed iff radix == 2. */
9779 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9780 {
9781 REAL_VALUE_TYPE result = *value;
9782 /* In GCC, normalized significands are in the range [0.5,
9783 1.0). We want them to be [1.0, 2.0) so set the
9784 exponent to 1. */
9785 SET_REAL_EXP (&result, 1);
9786 return build_real (rettype, result);
9787 }
9788 break;
9789 }
9790 }
9791
9792 return NULL_TREE;
9793 }
9794
9795 /* Fold a call to builtin frexp, we can assume the base is 2. */
9796
9797 static tree
9798 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9799 {
9800 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9801 return NULL_TREE;
9802
9803 STRIP_NOPS (arg0);
9804
9805 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9806 return NULL_TREE;
9807
9808 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9809
9810 /* Proceed if a valid pointer type was passed in. */
9811 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9812 {
9813 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9814 tree frac, exp;
9815
9816 switch (value->cl)
9817 {
9818 case rvc_zero:
9819 /* For +-0, return (*exp = 0, +-0). */
9820 exp = integer_zero_node;
9821 frac = arg0;
9822 break;
9823 case rvc_nan:
9824 case rvc_inf:
9825 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9826 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9827 case rvc_normal:
9828 {
9829 /* Since the frexp function always expects base 2, and in
9830 GCC normalized significands are already in the range
9831 [0.5, 1.0), we have exactly what frexp wants. */
9832 REAL_VALUE_TYPE frac_rvt = *value;
9833 SET_REAL_EXP (&frac_rvt, 0);
9834 frac = build_real (rettype, frac_rvt);
9835 exp = build_int_cst (integer_type_node, REAL_EXP (value));
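	    /* Worked example (illustrative): for arg0 == 8.0, frac
	       becomes 0.5 and exp becomes 4, matching
	       frexp (8.0, &e) == 0.5 with e == 4. */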
9836 }
9837 break;
9838 default:
9839 gcc_unreachable ();
9840 }
9841
9842 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
9843 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9844 TREE_SIDE_EFFECTS (arg1) = 1;
9845 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9846 }
9847
9848 return NULL_TREE;
9849 }
9850
9851 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9852 then we can assume the base is two. If it's false, then we have to
9853 check the mode of the TYPE parameter in certain cases. */
9854
9855 static tree
9856 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9857 tree type, bool ldexp)
9858 {
9859 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9860 {
9861 STRIP_NOPS (arg0);
9862 STRIP_NOPS (arg1);
9863
9864 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9865 if (real_zerop (arg0) || integer_zerop (arg1)
9866 || (TREE_CODE (arg0) == REAL_CST
9867 && !real_isfinite (&TREE_REAL_CST (arg0))))
9868 return omit_one_operand_loc (loc, type, arg0, arg1);
9869
9870 /* If both arguments are constant, then try to evaluate it. */
9871 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9872 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9873 && tree_fits_shwi_p (arg1))
9874 {
9875 /* Bound the maximum adjustment to twice the range of the
9876 mode's valid exponents. Use abs to ensure the range is
9877 positive as a sanity check. */
9878 const long max_exp_adj = 2 *
9879 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9880 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
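	  /* Worked example (illustrative): for IEEE double, emax is
	     1024 and emin is -1021, so max_exp_adj is
	     2 * (1024 - -1021) == 4090. */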
9881
9882 /* Get the user-requested adjustment. */
9883 const HOST_WIDE_INT req_exp_adj = tree_to_shwi (arg1);
9884
9885 /* The requested adjustment must be inside this range. This
9886 is a preliminary cap to avoid things like overflow, we
9887 may still fail to compute the result for other reasons. */
9888 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9889 {
9890 REAL_VALUE_TYPE initial_result;
9891
9892 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9893
9894 /* Ensure we didn't overflow. */
9895 if (! real_isinf (&initial_result))
9896 {
9897 const REAL_VALUE_TYPE trunc_result
9898 = real_value_truncate (TYPE_MODE (type), initial_result);
9899
9900 /* Only proceed if the target mode can hold the
9901 resulting value. */
9902 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9903 return build_real (type, trunc_result);
9904 }
9905 }
9906 }
9907 }
9908
9909 return NULL_TREE;
9910 }
9911
9912 /* Fold a call to builtin modf. */
9913
9914 static tree
9915 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9916 {
9917 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9918 return NULL_TREE;
9919
9920 STRIP_NOPS (arg0);
9921
9922 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9923 return NULL_TREE;
9924
9925 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9926
9927 /* Proceed if a valid pointer type was passed in. */
9928 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9929 {
9930 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9931 REAL_VALUE_TYPE trunc, frac;
9932
9933 switch (value->cl)
9934 {
9935 case rvc_nan:
9936 case rvc_zero:
9937 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9938 trunc = frac = *value;
9939 break;
9940 case rvc_inf:
9941 /* For +-Inf, return (*arg1 = arg0, +-0). */
9942 frac = dconst0;
9943 frac.sign = value->sign;
9944 trunc = *value;
9945 break;
9946 case rvc_normal:
9947 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9948 real_trunc (&trunc, VOIDmode, value);
9949 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9950 /* If the original number was negative and already
9951 integral, then the fractional part is -0.0. */
9952 if (value->sign && frac.cl == rvc_zero)
9953 frac.sign = value->sign;
9954 break;
9955 }
9956
9957 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9958 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9959 build_real (rettype, trunc));
9960 TREE_SIDE_EFFECTS (arg1) = 1;
9961 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9962 build_real (rettype, frac));
9963 }
9964
9965 return NULL_TREE;
9966 }
9967
9968 /* Given a location LOC, an interclass builtin function decl FNDECL
9969 and its single argument ARG, return a folded expression computing
9970 the same, or NULL_TREE if we either couldn't or didn't want to fold
9971 (the latter happens if there's an RTL instruction available). */
9972
9973 static tree
9974 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9975 {
9976 enum machine_mode mode;
9977
9978 if (!validate_arg (arg, REAL_TYPE))
9979 return NULL_TREE;
9980
9981 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9982 return NULL_TREE;
9983
9984 mode = TYPE_MODE (TREE_TYPE (arg));
9985
9986 /* If there is no optab, try generic code. */
9987 switch (DECL_FUNCTION_CODE (fndecl))
9988 {
9989 tree result;
9990
9991 CASE_FLT_FN (BUILT_IN_ISINF):
9992 {
9993 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9994 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9995 tree const type = TREE_TYPE (arg);
9996 REAL_VALUE_TYPE r;
9997 char buf[128];
9998
9999 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
10000 real_from_string (&r, buf);
10001 result = build_call_expr (isgr_fn, 2,
10002 fold_build1_loc (loc, ABS_EXPR, type, arg),
10003 build_real (type, r));
10004 return result;
10005 }
10006 CASE_FLT_FN (BUILT_IN_FINITE):
10007 case BUILT_IN_ISFINITE:
10008 {
10009 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
10010 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
10011 tree const type = TREE_TYPE (arg);
10012 REAL_VALUE_TYPE r;
10013 char buf[128];
10014
10015 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
10016 real_from_string (&r, buf);
10017 result = build_call_expr (isle_fn, 2,
10018 fold_build1_loc (loc, ABS_EXPR, type, arg),
10019 build_real (type, r));
10020 /*result = fold_build2_loc (loc, UNGT_EXPR,
10021 TREE_TYPE (TREE_TYPE (fndecl)),
10022 fold_build1_loc (loc, ABS_EXPR, type, arg),
10023 build_real (type, r));
10024 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
10025 TREE_TYPE (TREE_TYPE (fndecl)),
10026 result);*/
10027 return result;
10028 }
10029 case BUILT_IN_ISNORMAL:
10030 {
10031 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
10032 islessequal(fabs(x),DBL_MAX). */
10033 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
10034 tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
10035 tree const type = TREE_TYPE (arg);
10036 REAL_VALUE_TYPE rmax, rmin;
10037 char buf[128];
10038
10039 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
10040 real_from_string (&rmax, buf);
10041 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
10042 real_from_string (&rmin, buf);
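	/* Illustrative: for IEEE double, emin - 1 is -1022, so BUF is
	   "0x1p-1022" and rmin is the smallest normalized value, the
	   DBL_MIN analogue for MODE. */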
10043 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
10044 result = build_call_expr (isle_fn, 2, arg,
10045 build_real (type, rmax));
10046 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
10047 build_call_expr (isge_fn, 2, arg,
10048 build_real (type, rmin)));
10049 return result;
10050 }
10051 default:
10052 break;
10053 }
10054
10055 return NULL_TREE;
10056 }
10057
10058 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
10059 ARG is the argument for the call. */
10060
10061 static tree
10062 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
10063 {
10064 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10065 REAL_VALUE_TYPE r;
10066
10067 if (!validate_arg (arg, REAL_TYPE))
10068 return NULL_TREE;
10069
10070 switch (builtin_index)
10071 {
10072 case BUILT_IN_ISINF:
10073 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
10074 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
10075
10076 if (TREE_CODE (arg) == REAL_CST)
10077 {
10078 r = TREE_REAL_CST (arg);
10079 if (real_isinf (&r))
10080 return real_compare (GT_EXPR, &r, &dconst0)
10081 ? integer_one_node : integer_minus_one_node;
10082 else
10083 return integer_zero_node;
10084 }
10085
10086 return NULL_TREE;
10087
10088 case BUILT_IN_ISINF_SIGN:
10089 {
10090 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
10091 /* In a boolean context, GCC will fold the inner COND_EXPR to
10092 1. So e.g. "if (isinf_sign(x))" would be folded to just
10093 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
10094 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
10095 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
10096 tree tmp = NULL_TREE;
10097
10098 arg = builtin_save_expr (arg);
10099
10100 if (signbit_fn && isinf_fn)
10101 {
10102 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
10103 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
10104
10105 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
10106 signbit_call, integer_zero_node);
10107 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
10108 isinf_call, integer_zero_node);
10109
10110 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
10111 integer_minus_one_node, integer_one_node);
10112 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
10113 isinf_call, tmp,
10114 integer_zero_node);
10115 }
10116
10117 return tmp;
10118 }
10119
10120 case BUILT_IN_ISFINITE:
10121 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
10122 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
10123 return omit_one_operand_loc (loc, type, integer_one_node, arg);
10124
10125 if (TREE_CODE (arg) == REAL_CST)
10126 {
10127 r = TREE_REAL_CST (arg);
10128 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
10129 }
10130
10131 return NULL_TREE;
10132
10133 case BUILT_IN_ISNAN:
10134 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
10135 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
10136
10137 if (TREE_CODE (arg) == REAL_CST)
10138 {
10139 r = TREE_REAL_CST (arg);
10140 return real_isnan (&r) ? integer_one_node : integer_zero_node;
10141 }
10142
10143 arg = builtin_save_expr (arg);
10144 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
10145
10146 default:
10147 gcc_unreachable ();
10148 }
10149 }
10150
10151 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
10152 This builtin will generate code to return the appropriate floating
10153 point classification depending on the value of the floating point
10154 number passed in. The possible return values must be supplied as
10155 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
10156 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
10157 one floating point argument, which is "type generic". */
10158
10159 static tree
10160 fold_builtin_fpclassify (location_t loc, tree exp)
10161 {
10162 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
10163 arg, type, res, tmp;
10164 enum machine_mode mode;
10165 REAL_VALUE_TYPE r;
10166 char buf[128];
10167
10168 /* Verify the required arguments in the original call. */
10169 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
10170 INTEGER_TYPE, INTEGER_TYPE,
10171 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
10172 return NULL_TREE;
10173
10174 fp_nan = CALL_EXPR_ARG (exp, 0);
10175 fp_infinite = CALL_EXPR_ARG (exp, 1);
10176 fp_normal = CALL_EXPR_ARG (exp, 2);
10177 fp_subnormal = CALL_EXPR_ARG (exp, 3);
10178 fp_zero = CALL_EXPR_ARG (exp, 4);
10179 arg = CALL_EXPR_ARG (exp, 5);
10180 type = TREE_TYPE (arg);
10181 mode = TYPE_MODE (type);
10182 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
10183
10184 /* fpclassify(x) ->
10185 isnan(x) ? FP_NAN :
10186 (fabs(x) == Inf ? FP_INFINITE :
10187 (fabs(x) >= DBL_MIN ? FP_NORMAL :
10188 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
10189
10190 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
10191 build_real (type, dconst0));
10192 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
10193 tmp, fp_zero, fp_subnormal);
10194
10195 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
10196 real_from_string (&r, buf);
10197 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
10198 arg, build_real (type, r));
10199 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
10200
10201 if (HONOR_INFINITIES (mode))
10202 {
10203 real_inf (&r);
10204 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
10205 build_real (type, r));
10206 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
10207 fp_infinite, res);
10208 }
10209
10210 if (HONOR_NANS (mode))
10211 {
10212 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
10213 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
10214 }
10215
10216 return res;
10217 }
10218
10219 /* Fold a call to an unordered comparison function such as
10220 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
10221 being called and ARG0 and ARG1 are the arguments for the call.
10222 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
10223 the opposite of the desired result. UNORDERED_CODE is used
10224 for modes that can hold NaNs and ORDERED_CODE is used for
10225 the rest. */
10226
10227 static tree
10228 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
10229 enum tree_code unordered_code,
10230 enum tree_code ordered_code)
10231 {
10232 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10233 enum tree_code code;
10234 tree type0, type1;
10235 enum tree_code code0, code1;
10236 tree cmp_type = NULL_TREE;
10237
10238 type0 = TREE_TYPE (arg0);
10239 type1 = TREE_TYPE (arg1);
10240
10241 code0 = TREE_CODE (type0);
10242 code1 = TREE_CODE (type1);
10243
10244 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
10245 /* Choose the wider of two real types. */
10246 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
10247 ? type0 : type1;
10248 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
10249 cmp_type = type0;
10250 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
10251 cmp_type = type1;
10252
10253 arg0 = fold_convert_loc (loc, cmp_type, arg0);
10254 arg1 = fold_convert_loc (loc, cmp_type, arg1);
10255
10256 if (unordered_code == UNORDERED_EXPR)
10257 {
10258 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
10259 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
10260 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
10261 }
10262
10263 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
10264 : ordered_code;
10265 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
10266 fold_build2_loc (loc, code, type, arg0, arg1));
10267 }
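/* Illustrative sketch: a caller maps isgreater (x, y) to this
   function with UNORDERED_CODE == UNLE_EXPR and
   ORDERED_CODE == LE_EXPR, so for a mode that honors NaNs the call
   folds to !(x unle y), which is true exactly when x > y and neither
   operand is a NaN. */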
10268
10269 /* Fold a call to built-in function FNDECL with 0 arguments.
10270 IGNORE is true if the result of the function call is ignored. This
10271 function returns NULL_TREE if no simplification was possible. */
10272
10273 static tree
10274 fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
10275 {
10276 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10277 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10278 switch (fcode)
10279 {
10280 CASE_FLT_FN (BUILT_IN_INF):
10281 case BUILT_IN_INFD32:
10282 case BUILT_IN_INFD64:
10283 case BUILT_IN_INFD128:
10284 return fold_builtin_inf (loc, type, true);
10285
10286 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
10287 return fold_builtin_inf (loc, type, false);
10288
10289 case BUILT_IN_CLASSIFY_TYPE:
10290 return fold_builtin_classify_type (NULL_TREE);
10291
10292 case BUILT_IN_UNREACHABLE:
10293 if (flag_sanitize & SANITIZE_UNREACHABLE
10294 && (current_function_decl == NULL
10295 || !lookup_attribute ("no_sanitize_undefined",
10296 DECL_ATTRIBUTES (current_function_decl))))
10297 return ubsan_instrument_unreachable (loc);
10298 break;
10299
10300 default:
10301 break;
10302 }
10303 return NULL_TREE;
10304 }
10305
10306 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
10307 IGNORE is true if the result of the function call is ignored. This
10308 function returns NULL_TREE if no simplification was possible. */
10309
10310 static tree
10311 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
10312 {
10313 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10314 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10315 switch (fcode)
10316 {
10317 case BUILT_IN_CONSTANT_P:
10318 {
10319 tree val = fold_builtin_constant_p (arg0);
10320
10321 /* Gimplification will pull the CALL_EXPR for the builtin out of
10322 an if condition. When not optimizing, we'll not CSE it back.
10323 To avoid link-error regressions, return false now. */
10324 if (!val && !optimize)
10325 val = integer_zero_node;
10326
10327 return val;
10328 }
10329
10330 case BUILT_IN_CLASSIFY_TYPE:
10331 return fold_builtin_classify_type (arg0);
10332
10333 case BUILT_IN_STRLEN:
10334 return fold_builtin_strlen (loc, type, arg0);
10335
10336 CASE_FLT_FN (BUILT_IN_FABS):
10337 case BUILT_IN_FABSD32:
10338 case BUILT_IN_FABSD64:
10339 case BUILT_IN_FABSD128:
10340 return fold_builtin_fabs (loc, arg0, type);
10341
10342 case BUILT_IN_ABS:
10343 case BUILT_IN_LABS:
10344 case BUILT_IN_LLABS:
10345 case BUILT_IN_IMAXABS:
10346 return fold_builtin_abs (loc, arg0, type);
10347
10348 CASE_FLT_FN (BUILT_IN_CONJ):
10349 if (validate_arg (arg0, COMPLEX_TYPE)
10350 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10351 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
10352 break;
10353
10354 CASE_FLT_FN (BUILT_IN_CREAL):
10355 if (validate_arg (arg0, COMPLEX_TYPE)
10356 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10357 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
10358 break;
10359
10360 CASE_FLT_FN (BUILT_IN_CIMAG):
10361 if (validate_arg (arg0, COMPLEX_TYPE)
10362 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10363 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
10364 break;
10365
10366 CASE_FLT_FN (BUILT_IN_CCOS):
10367 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ false);
10368
10369 CASE_FLT_FN (BUILT_IN_CCOSH):
10370 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ true);
10371
10372 CASE_FLT_FN (BUILT_IN_CPROJ):
10373 return fold_builtin_cproj (loc, arg0, type);
10374
10375 CASE_FLT_FN (BUILT_IN_CSIN):
10376 if (validate_arg (arg0, COMPLEX_TYPE)
10377 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10378 return do_mpc_arg1 (arg0, type, mpc_sin);
10379 break;
10380
10381 CASE_FLT_FN (BUILT_IN_CSINH):
10382 if (validate_arg (arg0, COMPLEX_TYPE)
10383 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10384 return do_mpc_arg1 (arg0, type, mpc_sinh);
10385 break;
10386
10387 CASE_FLT_FN (BUILT_IN_CTAN):
10388 if (validate_arg (arg0, COMPLEX_TYPE)
10389 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10390 return do_mpc_arg1 (arg0, type, mpc_tan);
10391 break;
10392
10393 CASE_FLT_FN (BUILT_IN_CTANH):
10394 if (validate_arg (arg0, COMPLEX_TYPE)
10395 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10396 return do_mpc_arg1 (arg0, type, mpc_tanh);
10397 break;
10398
10399 CASE_FLT_FN (BUILT_IN_CLOG):
10400 if (validate_arg (arg0, COMPLEX_TYPE)
10401 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10402 return do_mpc_arg1 (arg0, type, mpc_log);
10403 break;
10404
10405 CASE_FLT_FN (BUILT_IN_CSQRT):
10406 if (validate_arg (arg0, COMPLEX_TYPE)
10407 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10408 return do_mpc_arg1 (arg0, type, mpc_sqrt);
10409 break;
10410
10411 CASE_FLT_FN (BUILT_IN_CASIN):
10412 if (validate_arg (arg0, COMPLEX_TYPE)
10413 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10414 return do_mpc_arg1 (arg0, type, mpc_asin);
10415 break;
10416
10417 CASE_FLT_FN (BUILT_IN_CACOS):
10418 if (validate_arg (arg0, COMPLEX_TYPE)
10419 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10420 return do_mpc_arg1 (arg0, type, mpc_acos);
10421 break;
10422
10423 CASE_FLT_FN (BUILT_IN_CATAN):
10424 if (validate_arg (arg0, COMPLEX_TYPE)
10425 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10426 return do_mpc_arg1 (arg0, type, mpc_atan);
10427 break;
10428
10429 CASE_FLT_FN (BUILT_IN_CASINH):
10430 if (validate_arg (arg0, COMPLEX_TYPE)
10431 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10432 return do_mpc_arg1 (arg0, type, mpc_asinh);
10433 break;
10434
10435 CASE_FLT_FN (BUILT_IN_CACOSH):
10436 if (validate_arg (arg0, COMPLEX_TYPE)
10437 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10438 return do_mpc_arg1 (arg0, type, mpc_acosh);
10439 break;
10440
10441 CASE_FLT_FN (BUILT_IN_CATANH):
10442 if (validate_arg (arg0, COMPLEX_TYPE)
10443 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10444 return do_mpc_arg1 (arg0, type, mpc_atanh);
10445 break;
10446
10447 CASE_FLT_FN (BUILT_IN_CABS):
10448 return fold_builtin_cabs (loc, arg0, type, fndecl);
10449
10450 CASE_FLT_FN (BUILT_IN_CARG):
10451 return fold_builtin_carg (loc, arg0, type);
10452
10453 CASE_FLT_FN (BUILT_IN_SQRT):
10454 return fold_builtin_sqrt (loc, arg0, type);
10455
10456 CASE_FLT_FN (BUILT_IN_CBRT):
10457 return fold_builtin_cbrt (loc, arg0, type);
10458
10459 CASE_FLT_FN (BUILT_IN_ASIN):
10460 if (validate_arg (arg0, REAL_TYPE))
10461 return do_mpfr_arg1 (arg0, type, mpfr_asin,
10462 &dconstm1, &dconst1, true);
10463 break;
10464
10465 CASE_FLT_FN (BUILT_IN_ACOS):
10466 if (validate_arg (arg0, REAL_TYPE))
10467 return do_mpfr_arg1 (arg0, type, mpfr_acos,
10468 &dconstm1, &dconst1, true);
10469 break;
10470
10471 CASE_FLT_FN (BUILT_IN_ATAN):
10472 if (validate_arg (arg0, REAL_TYPE))
10473 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
10474 break;
10475
10476 CASE_FLT_FN (BUILT_IN_ASINH):
10477 if (validate_arg (arg0, REAL_TYPE))
10478 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10479 break;
10480
10481 CASE_FLT_FN (BUILT_IN_ACOSH):
10482 if (validate_arg (arg0, REAL_TYPE))
10483 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10484 &dconst1, NULL, true);
10485 break;
10486
10487 CASE_FLT_FN (BUILT_IN_ATANH):
10488 if (validate_arg (arg0, REAL_TYPE))
10489 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10490 &dconstm1, &dconst1, false);
10491 break;
10492
10493 CASE_FLT_FN (BUILT_IN_SIN):
10494 if (validate_arg (arg0, REAL_TYPE))
10495 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10496 break;
10497
10498 CASE_FLT_FN (BUILT_IN_COS):
10499 return fold_builtin_cos (loc, arg0, type, fndecl);
10500
10501 CASE_FLT_FN (BUILT_IN_TAN):
10502 return fold_builtin_tan (arg0, type);
10503
10504 CASE_FLT_FN (BUILT_IN_CEXP):
10505 return fold_builtin_cexp (loc, arg0, type);
10506
10507 CASE_FLT_FN (BUILT_IN_CEXPI):
10508 if (validate_arg (arg0, REAL_TYPE))
10509 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10510 break;
10511
10512 CASE_FLT_FN (BUILT_IN_SINH):
10513 if (validate_arg (arg0, REAL_TYPE))
10514 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10515 break;
10516
10517 CASE_FLT_FN (BUILT_IN_COSH):
10518 return fold_builtin_cosh (loc, arg0, type, fndecl);
10519
10520 CASE_FLT_FN (BUILT_IN_TANH):
10521 if (validate_arg (arg0, REAL_TYPE))
10522 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10523 break;
10524
10525 CASE_FLT_FN (BUILT_IN_ERF):
10526 if (validate_arg (arg0, REAL_TYPE))
10527 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10528 break;
10529
10530 CASE_FLT_FN (BUILT_IN_ERFC):
10531 if (validate_arg (arg0, REAL_TYPE))
10532 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10533 break;
10534
10535 CASE_FLT_FN (BUILT_IN_TGAMMA):
10536 if (validate_arg (arg0, REAL_TYPE))
10537 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10538 break;
10539
10540 CASE_FLT_FN (BUILT_IN_EXP):
10541 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
10542
10543 CASE_FLT_FN (BUILT_IN_EXP2):
10544 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
10545
10546 CASE_FLT_FN (BUILT_IN_EXP10):
10547 CASE_FLT_FN (BUILT_IN_POW10):
10548 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
10549
10550 CASE_FLT_FN (BUILT_IN_EXPM1):
10551 if (validate_arg (arg0, REAL_TYPE))
10552 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10553 break;
10554
10555 CASE_FLT_FN (BUILT_IN_LOG):
10556 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
10557
10558 CASE_FLT_FN (BUILT_IN_LOG2):
10559 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
10560
10561 CASE_FLT_FN (BUILT_IN_LOG10):
10562 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
10563
10564 CASE_FLT_FN (BUILT_IN_LOG1P):
10565 if (validate_arg (arg0, REAL_TYPE))
10566 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10567 &dconstm1, NULL, false);
10568 break;
10569
10570 CASE_FLT_FN (BUILT_IN_J0):
10571 if (validate_arg (arg0, REAL_TYPE))
10572 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10573 NULL, NULL, 0);
10574 break;
10575
10576 CASE_FLT_FN (BUILT_IN_J1):
10577 if (validate_arg (arg0, REAL_TYPE))
10578 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10579 NULL, NULL, 0);
10580 break;
10581
10582 CASE_FLT_FN (BUILT_IN_Y0):
10583 if (validate_arg (arg0, REAL_TYPE))
10584 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10585 &dconst0, NULL, false);
10586 break;
10587
10588 CASE_FLT_FN (BUILT_IN_Y1):
10589 if (validate_arg (arg0, REAL_TYPE))
10590 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10591 &dconst0, NULL, false);
10592 break;
10593
10594 CASE_FLT_FN (BUILT_IN_NAN):
10595 case BUILT_IN_NAND32:
10596 case BUILT_IN_NAND64:
10597 case BUILT_IN_NAND128:
10598 return fold_builtin_nan (arg0, type, true);
10599
10600 CASE_FLT_FN (BUILT_IN_NANS):
10601 return fold_builtin_nan (arg0, type, false);
10602
10603 CASE_FLT_FN (BUILT_IN_FLOOR):
10604 return fold_builtin_floor (loc, fndecl, arg0);
10605
10606 CASE_FLT_FN (BUILT_IN_CEIL):
10607 return fold_builtin_ceil (loc, fndecl, arg0);
10608
10609 CASE_FLT_FN (BUILT_IN_TRUNC):
10610 return fold_builtin_trunc (loc, fndecl, arg0);
10611
10612 CASE_FLT_FN (BUILT_IN_ROUND):
10613 return fold_builtin_round (loc, fndecl, arg0);
10614
10615 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10616 CASE_FLT_FN (BUILT_IN_RINT):
10617 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
10618
10619 CASE_FLT_FN (BUILT_IN_ICEIL):
10620 CASE_FLT_FN (BUILT_IN_LCEIL):
10621 CASE_FLT_FN (BUILT_IN_LLCEIL):
10622 CASE_FLT_FN (BUILT_IN_LFLOOR):
10623 CASE_FLT_FN (BUILT_IN_IFLOOR):
10624 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10625 CASE_FLT_FN (BUILT_IN_IROUND):
10626 CASE_FLT_FN (BUILT_IN_LROUND):
10627 CASE_FLT_FN (BUILT_IN_LLROUND):
10628 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
10629
10630 CASE_FLT_FN (BUILT_IN_IRINT):
10631 CASE_FLT_FN (BUILT_IN_LRINT):
10632 CASE_FLT_FN (BUILT_IN_LLRINT):
10633 return fold_fixed_mathfn (loc, fndecl, arg0);
10634
10635 case BUILT_IN_BSWAP16:
10636 case BUILT_IN_BSWAP32:
10637 case BUILT_IN_BSWAP64:
10638 return fold_builtin_bswap (fndecl, arg0);
10639
10640 CASE_INT_FN (BUILT_IN_FFS):
10641 CASE_INT_FN (BUILT_IN_CLZ):
10642 CASE_INT_FN (BUILT_IN_CTZ):
10643 CASE_INT_FN (BUILT_IN_CLRSB):
10644 CASE_INT_FN (BUILT_IN_POPCOUNT):
10645 CASE_INT_FN (BUILT_IN_PARITY):
10646 return fold_builtin_bitop (fndecl, arg0);
10647
10648 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10649 return fold_builtin_signbit (loc, arg0, type);
10650
10651 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10652 return fold_builtin_significand (loc, arg0, type);
10653
10654 CASE_FLT_FN (BUILT_IN_ILOGB):
10655 CASE_FLT_FN (BUILT_IN_LOGB):
10656 return fold_builtin_logb (loc, arg0, type);
10657
10658 case BUILT_IN_ISASCII:
10659 return fold_builtin_isascii (loc, arg0);
10660
10661 case BUILT_IN_TOASCII:
10662 return fold_builtin_toascii (loc, arg0);
10663
10664 case BUILT_IN_ISDIGIT:
10665 return fold_builtin_isdigit (loc, arg0);
10666
10667 CASE_FLT_FN (BUILT_IN_FINITE):
10668 case BUILT_IN_FINITED32:
10669 case BUILT_IN_FINITED64:
10670 case BUILT_IN_FINITED128:
10671 case BUILT_IN_ISFINITE:
10672 {
10673 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10674 if (ret)
10675 return ret;
10676 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10677 }
10678
10679 CASE_FLT_FN (BUILT_IN_ISINF):
10680 case BUILT_IN_ISINFD32:
10681 case BUILT_IN_ISINFD64:
10682 case BUILT_IN_ISINFD128:
10683 {
10684 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10685 if (ret)
10686 return ret;
10687 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10688 }
10689
10690 case BUILT_IN_ISNORMAL:
10691 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10692
10693 case BUILT_IN_ISINF_SIGN:
10694 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10695
10696 CASE_FLT_FN (BUILT_IN_ISNAN):
10697 case BUILT_IN_ISNAND32:
10698 case BUILT_IN_ISNAND64:
10699 case BUILT_IN_ISNAND128:
10700 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10701
10702 case BUILT_IN_PRINTF:
10703 case BUILT_IN_PRINTF_UNLOCKED:
10704 case BUILT_IN_VPRINTF:
10705 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
10706
10707 case BUILT_IN_FREE:
10708 if (integer_zerop (arg0))
10709 return build_empty_stmt (loc);
10710 break;
10711
10712 default:
10713 break;
10714 }
10715
10716 return NULL_TREE;
10717
10718 }
10719
10720 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10721 IGNORE is true if the result of the function call is ignored. This
10722 function returns NULL_TREE if no simplification was possible. */
10723
10724 static tree
10725 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
10726 {
10727 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10728 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10729
10730 switch (fcode)
10731 {
10732 CASE_FLT_FN (BUILT_IN_JN):
10733 if (validate_arg (arg0, INTEGER_TYPE)
10734 && validate_arg (arg1, REAL_TYPE))
10735 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10736 break;
10737
10738 CASE_FLT_FN (BUILT_IN_YN):
10739 if (validate_arg (arg0, INTEGER_TYPE)
10740 && validate_arg (arg1, REAL_TYPE))
10741 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10742 &dconst0, false);
10743 break;
10744
10745 CASE_FLT_FN (BUILT_IN_DREM):
10746 CASE_FLT_FN (BUILT_IN_REMAINDER):
10747 if (validate_arg (arg0, REAL_TYPE)
10748 && validate_arg (arg1, REAL_TYPE))
10749 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10750 break;
10751
10752 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10753 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10754 if (validate_arg (arg0, REAL_TYPE)
10755 && validate_arg (arg1, POINTER_TYPE))
10756 return do_mpfr_lgamma_r (arg0, arg1, type);
10757 break;
10758
10759 CASE_FLT_FN (BUILT_IN_ATAN2):
10760 if (validate_arg (arg0, REAL_TYPE)
10761 && validate_arg (arg1, REAL_TYPE))
10762 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10763 break;
10764
10765 CASE_FLT_FN (BUILT_IN_FDIM):
10766 if (validate_arg (arg0, REAL_TYPE)
10767 && validate_arg (arg1, REAL_TYPE))
10768 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10769 break;
10770
10771 CASE_FLT_FN (BUILT_IN_HYPOT):
10772 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10773
10774 CASE_FLT_FN (BUILT_IN_CPOW):
10775 if (validate_arg (arg0, COMPLEX_TYPE)
10776 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10777 && validate_arg (arg1, COMPLEX_TYPE)
10778 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10779 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10780 break;
10781
10782 CASE_FLT_FN (BUILT_IN_LDEXP):
10783 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10784 CASE_FLT_FN (BUILT_IN_SCALBN):
10785 CASE_FLT_FN (BUILT_IN_SCALBLN):
10786 return fold_builtin_load_exponent (loc, arg0, arg1,
10787 type, /*ldexp=*/false);
10788
10789 CASE_FLT_FN (BUILT_IN_FREXP):
10790 return fold_builtin_frexp (loc, arg0, arg1, type);
10791
10792 CASE_FLT_FN (BUILT_IN_MODF):
10793 return fold_builtin_modf (loc, arg0, arg1, type);
10794
10795 case BUILT_IN_BZERO:
10796 return fold_builtin_bzero (loc, arg0, arg1, ignore);
10797
10798 case BUILT_IN_FPUTS:
10799 return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);
10800
10801 case BUILT_IN_FPUTS_UNLOCKED:
10802 return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);
10803
10804 case BUILT_IN_STRSTR:
10805 return fold_builtin_strstr (loc, arg0, arg1, type);
10806
10807 case BUILT_IN_STRCAT:
10808 return fold_builtin_strcat (loc, arg0, arg1, NULL_TREE);
10809
10810 case BUILT_IN_STRSPN:
10811 return fold_builtin_strspn (loc, arg0, arg1);
10812
10813 case BUILT_IN_STRCSPN:
10814 return fold_builtin_strcspn (loc, arg0, arg1);
10815
10816 case BUILT_IN_STRCHR:
10817 case BUILT_IN_INDEX:
10818 return fold_builtin_strchr (loc, arg0, arg1, type);
10819
10820 case BUILT_IN_STRRCHR:
10821 case BUILT_IN_RINDEX:
10822 return fold_builtin_strrchr (loc, arg0, arg1, type);
10823
10824 case BUILT_IN_STRCPY:
10825 return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);
10826
10827 case BUILT_IN_STPCPY:
10828 if (ignore)
10829 {
10830 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
10831 if (!fn)
10832 break;
10833
10834 return build_call_expr_loc (loc, fn, 2, arg0, arg1);
10835 }
10836 else
10837 return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
10838 break;
10839
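/* Editor's sketch (not GCC source): with the result unused, the stpcpy
   case above rewrites

       stpcpy (dst, src);   =>  strcpy (dst, src);

   which is valid because the two functions differ only in their return
   value. */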
10840 case BUILT_IN_STRCMP:
10841 return fold_builtin_strcmp (loc, arg0, arg1);
10842
10843 case BUILT_IN_STRPBRK:
10844 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10845
10846 case BUILT_IN_EXPECT:
10847 return fold_builtin_expect (loc, arg0, arg1);
10848
10849 CASE_FLT_FN (BUILT_IN_POW):
10850 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10851
10852 CASE_FLT_FN (BUILT_IN_POWI):
10853 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10854
10855 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10856 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10857
10858 CASE_FLT_FN (BUILT_IN_FMIN):
10859 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10860
10861 CASE_FLT_FN (BUILT_IN_FMAX):
10862 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10863
10864 case BUILT_IN_ISGREATER:
10865 return fold_builtin_unordered_cmp (loc, fndecl,
10866 arg0, arg1, UNLE_EXPR, LE_EXPR);
10867 case BUILT_IN_ISGREATEREQUAL:
10868 return fold_builtin_unordered_cmp (loc, fndecl,
10869 arg0, arg1, UNLT_EXPR, LT_EXPR);
10870 case BUILT_IN_ISLESS:
10871 return fold_builtin_unordered_cmp (loc, fndecl,
10872 arg0, arg1, UNGE_EXPR, GE_EXPR);
10873 case BUILT_IN_ISLESSEQUAL:
10874 return fold_builtin_unordered_cmp (loc, fndecl,
10875 arg0, arg1, UNGT_EXPR, GT_EXPR);
10876 case BUILT_IN_ISLESSGREATER:
10877 return fold_builtin_unordered_cmp (loc, fndecl,
10878 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10879 case BUILT_IN_ISUNORDERED:
10880 return fold_builtin_unordered_cmp (loc, fndecl,
10881 arg0, arg1, UNORDERED_EXPR,
10882 NOP_EXPR);
10883
10884 /* We do the folding for va_start in the expander. */
10885 case BUILT_IN_VA_START:
10886 break;
10887
10888 case BUILT_IN_SPRINTF:
10889 return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);
10890
10891 case BUILT_IN_OBJECT_SIZE:
10892 return fold_builtin_object_size (arg0, arg1);
10893
10894 case BUILT_IN_PRINTF:
10895 case BUILT_IN_PRINTF_UNLOCKED:
10896 case BUILT_IN_VPRINTF:
10897 return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);
10898
10899 case BUILT_IN_PRINTF_CHK:
10900 case BUILT_IN_VPRINTF_CHK:
10901 if (!validate_arg (arg0, INTEGER_TYPE)
10902 || TREE_SIDE_EFFECTS (arg0))
10903 return NULL_TREE;
10904 else
10905 return fold_builtin_printf (loc, fndecl,
10906 arg1, NULL_TREE, ignore, fcode);
10907 break;
10908
10909 case BUILT_IN_FPRINTF:
10910 case BUILT_IN_FPRINTF_UNLOCKED:
10911 case BUILT_IN_VFPRINTF:
10912 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
10913 ignore, fcode);
10914
10915 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10916 return fold_builtin_atomic_always_lock_free (arg0, arg1);
10917
10918 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10919 return fold_builtin_atomic_is_lock_free (arg0, arg1);
10920
10921 default:
10922 break;
10923 }
10924 return NULL_TREE;
10925 }
10926
10927 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10928 and ARG2. IGNORE is true if the result of the function call is ignored.
10929 This function returns NULL_TREE if no simplification was possible. */
10930
10931 static tree
10932 fold_builtin_3 (location_t loc, tree fndecl,
10933 tree arg0, tree arg1, tree arg2, bool ignore)
10934 {
10935 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10936 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10937 switch (fcode)
10938 {
10939
10940 CASE_FLT_FN (BUILT_IN_SINCOS):
10941 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10942
10943 CASE_FLT_FN (BUILT_IN_FMA):
10944 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
10946
10947 CASE_FLT_FN (BUILT_IN_REMQUO):
10948 if (validate_arg (arg0, REAL_TYPE)
10949 && validate_arg (arg1, REAL_TYPE)
10950 && validate_arg (arg2, POINTER_TYPE))
10951 return do_mpfr_remquo (arg0, arg1, arg2);
10952 break;
10953
10954 case BUILT_IN_MEMSET:
10955 return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);
10956
10957 case BUILT_IN_BCOPY:
10958 return fold_builtin_memory_op (loc, arg1, arg0, arg2,
10959 void_type_node, true, /*endp=*/3);
10960
10961 case BUILT_IN_MEMCPY:
10962 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10963 type, ignore, /*endp=*/0);
10964
10965 case BUILT_IN_MEMPCPY:
10966 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10967 type, ignore, /*endp=*/1);
10968
10969 case BUILT_IN_MEMMOVE:
10970 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10971 type, ignore, /*endp=*/3);
10972
10973 case BUILT_IN_STRNCAT:
10974 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10975
10976 case BUILT_IN_STRNCPY:
10977 return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);
10978
10979 case BUILT_IN_STRNCMP:
10980 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10981
10982 case BUILT_IN_MEMCHR:
10983 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10984
10985 case BUILT_IN_BCMP:
10986 case BUILT_IN_MEMCMP:
10987 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
10988
10989 case BUILT_IN_SPRINTF:
10990 return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);
10991
10992 case BUILT_IN_SNPRINTF:
10993 return fold_builtin_snprintf (loc, arg0, arg1, arg2, NULL_TREE, ignore);
10994
10995 case BUILT_IN_STRCPY_CHK:
10996 case BUILT_IN_STPCPY_CHK:
10997 return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
10998 ignore, fcode);
10999
11000 case BUILT_IN_STRCAT_CHK:
11001 return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
11002
11003 case BUILT_IN_PRINTF_CHK:
11004 case BUILT_IN_VPRINTF_CHK:
11005 if (!validate_arg (arg0, INTEGER_TYPE)
11006 || TREE_SIDE_EFFECTS (arg0))
11007 return NULL_TREE;
11008 else
11009 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
11010 break;
11011
11012 case BUILT_IN_FPRINTF:
11013 case BUILT_IN_FPRINTF_UNLOCKED:
11014 case BUILT_IN_VFPRINTF:
11015 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
11016 ignore, fcode);
11017
11018 case BUILT_IN_FPRINTF_CHK:
11019 case BUILT_IN_VFPRINTF_CHK:
11020 if (!validate_arg (arg1, INTEGER_TYPE)
11021 || TREE_SIDE_EFFECTS (arg1))
11022 return NULL_TREE;
11023 else
11024 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
11025 ignore, fcode);
11026
11027 default:
11028 break;
11029 }
11030 return NULL_TREE;
11031 }
11032
11033 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
11034 ARG2, and ARG3. IGNORE is true if the result of the function call is
11035 ignored. This function returns NULL_TREE if no simplification was
11036 possible. */
11037
11038 static tree
11039 fold_builtin_4 (location_t loc, tree fndecl,
11040 tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
11041 {
11042 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11043
11044 switch (fcode)
11045 {
11046 case BUILT_IN_MEMCPY_CHK:
11047 case BUILT_IN_MEMPCPY_CHK:
11048 case BUILT_IN_MEMMOVE_CHK:
11049 case BUILT_IN_MEMSET_CHK:
11050 return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
11051 NULL_TREE, ignore,
11052 DECL_FUNCTION_CODE (fndecl));
11053
11054 case BUILT_IN_STRNCPY_CHK:
11055 case BUILT_IN_STPNCPY_CHK:
11056 return fold_builtin_stxncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE,
11057 ignore, fcode);
11058
11059 case BUILT_IN_STRNCAT_CHK:
11060 return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);
11061
11062 case BUILT_IN_SNPRINTF:
11063 return fold_builtin_snprintf (loc, arg0, arg1, arg2, arg3, ignore);
11064
11065 case BUILT_IN_FPRINTF_CHK:
11066 case BUILT_IN_VFPRINTF_CHK:
11067 if (!validate_arg (arg1, INTEGER_TYPE)
11068 || TREE_SIDE_EFFECTS (arg1))
11069 return NULL_TREE;
11070 else
11071 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
11072 ignore, fcode);
11073 break;
11074
11075 default:
11076 break;
11077 }
11078 return NULL_TREE;
11079 }
11080
11081 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
11082 arguments, where NARGS <= 4. IGNORE is true if the result of the
11083 function call is ignored. This function returns NULL_TREE if no
11084 simplification was possible. Note that this only folds builtins with
11085 fixed argument patterns. Foldings that do varargs-to-varargs
11086 transformations, or that match calls with more than 4 arguments,
11087 need to be handled with fold_builtin_varargs instead. */
11088
11089 #define MAX_ARGS_TO_FOLD_BUILTIN 4
11090
11091 static tree
11092 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
11093 {
11094 tree ret = NULL_TREE;
11095
11096 switch (nargs)
11097 {
11098 case 0:
11099 ret = fold_builtin_0 (loc, fndecl, ignore);
11100 break;
11101 case 1:
11102 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
11103 break;
11104 case 2:
11105 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
11106 break;
11107 case 3:
11108 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
11109 break;
11110 case 4:
11111 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
11112 ignore);
11113 break;
11114 default:
11115 break;
11116 }
11117 if (ret)
11118 {
11119 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11120 SET_EXPR_LOCATION (ret, loc);
11121 TREE_NO_WARNING (ret) = 1;
11122 return ret;
11123 }
11124 return NULL_TREE;
11125 }
11126
11127 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
11128 list ARGS along with N new arguments in NEWARGS. SKIP is the number
11129 of arguments in ARGS to be omitted. OLDNARGS is the number of
11130 elements in ARGS. */
11131
11132 static tree
11133 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
11134 int skip, tree fndecl, int n, va_list newargs)
11135 {
11136 int nargs = oldnargs - skip + n;
11137 tree *buffer;
11138
11139 if (n > 0)
11140 {
11141 int i, j;
11142
11143 buffer = XALLOCAVEC (tree, nargs);
11144 for (i = 0; i < n; i++)
11145 buffer[i] = va_arg (newargs, tree);
11146 for (j = skip; j < oldnargs; j++, i++)
11147 buffer[i] = args[j];
11148 }
11149 else
11150 buffer = args + skip;
11151
11152 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
11153 }
11154
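/* Editor's sketch (not GCC source): for OLDNARGS = 4, SKIP = 2, N = 1,
   the buffer built above is laid out as

       buffer[0]    = newargs[0]
       buffer[1..2] = args[2..3]

   i.e. the N new arguments come first, followed by the OLDNARGS - SKIP
   surviving tail of the old argument list. */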
11155 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
11156 list ARGS along with N new arguments specified as the "..."
11157 parameters. SKIP is the number of arguments in ARGS to be omitted.
11158 OLDNARGS is the number of elements in ARGS. */
11159
11160 static tree
11161 rewrite_call_expr_array (location_t loc, int oldnargs, tree *args,
11162 int skip, tree fndecl, int n, ...)
11163 {
11164 va_list ap;
11165 tree t;
11166
11167 va_start (ap, n);
11168 t = rewrite_call_expr_valist (loc, oldnargs, args, skip, fndecl, n, ap);
11169 va_end (ap);
11170
11171 return t;
11172 }
11173
11174 /* Return true if FNDECL shouldn't be folded right now.
11175 If a built-in function has an inline attribute always_inline
11176 wrapper, defer folding it until after always_inline functions have
11177 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
11178 might not be performed. */
11179
11180 bool
11181 avoid_folding_inline_builtin (tree fndecl)
11182 {
11183 return (DECL_DECLARED_INLINE_P (fndecl)
11184 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
11185 && cfun
11186 && !cfun->always_inline_functions_inlined
11187 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
11188 }
11189
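/* Editor's illustration (not GCC source): the kind of always_inline
   wrapper this defers for. Fortified headers (e.g. glibc's with
   -D_FORTIFY_SOURCE) define, roughly:

       extern __inline __attribute__ ((__always_inline__)) char *
       strcpy (char *__dest, const char *__src)
       {
         return __builtin___strcpy_chk (__dest, __src,
                                        __builtin_object_size (__dest, 1));
       }

   Folding the builtin before the wrapper is inlined would bypass the
   object-size check. */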
11190 /* A wrapper function for builtin folding that prevents warnings for
11191 "statement without effect" and the like, caused by removing the
11192 call node before the warning is generated. */
11193
11194 tree
11195 fold_call_expr (location_t loc, tree exp, bool ignore)
11196 {
11197 tree ret = NULL_TREE;
11198 tree fndecl = get_callee_fndecl (exp);
11199 if (fndecl
11200 && TREE_CODE (fndecl) == FUNCTION_DECL
11201 && DECL_BUILT_IN (fndecl)
11202 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
11203 yet. Defer folding until we see all the arguments
11204 (after inlining). */
11205 && !CALL_EXPR_VA_ARG_PACK (exp))
11206 {
11207 int nargs = call_expr_nargs (exp);
11208
11209 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
11210 instead the last argument is __builtin_va_arg_pack (). Defer folding
11211 even in that case, until arguments are finalized. */
11212 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
11213 {
11214 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
11215 if (fndecl2
11216 && TREE_CODE (fndecl2) == FUNCTION_DECL
11217 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
11218 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
11219 return NULL_TREE;
11220 }
11221
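      /* Editor's sketch (not GCC source): the deferral above matters for
	 forwarding wrappers such as

	     extern int my_log (const char *fmt, ...);
	     extern __inline __attribute__ ((__always_inline__)) int
	     log_wrap (const char *fmt, ...)
	     {
	       return my_log (fmt, __builtin_va_arg_pack ());
	     }

	 where the argument list is only known after inlining. */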
11222 if (avoid_folding_inline_builtin (fndecl))
11223 return NULL_TREE;
11224
11225 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11226 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
11227 CALL_EXPR_ARGP (exp), ignore);
11228 else
11229 {
11230 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
11231 {
11232 tree *args = CALL_EXPR_ARGP (exp);
11233 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
11234 }
11235 if (!ret)
11236 ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
11237 if (ret)
11238 return ret;
11239 }
11240 }
11241 return NULL_TREE;
11242 }
11243
11244 /* Conveniently construct a function call expression. FNDECL names the
11245 function to be called and N arguments are passed in the array
11246 ARGARRAY. */
11247
11248 tree
11249 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
11250 {
11251 tree fntype = TREE_TYPE (fndecl);
11252 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11253
11254 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
11255 }
11256
11257 /* Conveniently construct a function call expression. FNDECL names the
11258 function to be called and the arguments are passed in the vector
11259 VEC. */
11260
11261 tree
11262 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
11263 {
11264 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
11265 vec_safe_address (vec));
11266 }
11267
11268
11269 /* Conveniently construct a function call expression. FNDECL names the
11270 function to be called, N is the number of arguments, and the "..."
11271 parameters are the argument expressions. */
11272
11273 tree
11274 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
11275 {
11276 va_list ap;
11277 tree *argarray = XALLOCAVEC (tree, n);
11278 int i;
11279
11280 va_start (ap, n);
11281 for (i = 0; i < n; i++)
11282 argarray[i] = va_arg (ap, tree);
11283 va_end (ap);
11284 return build_call_expr_loc_array (loc, fndecl, n, argarray);
11285 }
11286
11287 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
11288 varargs macros aren't supported by all bootstrap compilers. */
11289
11290 tree
11291 build_call_expr (tree fndecl, int n, ...)
11292 {
11293 va_list ap;
11294 tree *argarray = XALLOCAVEC (tree, n);
11295 int i;
11296
11297 va_start (ap, n);
11298 for (i = 0; i < n; i++)
11299 argarray[i] = va_arg (ap, tree);
11300 va_end (ap);
11301 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
11302 }
11303
11304 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
11305 N arguments are passed in the array ARGARRAY. */
11306
11307 tree
11308 fold_builtin_call_array (location_t loc, tree type,
11309 tree fn,
11310 int n,
11311 tree *argarray)
11312 {
11313 tree ret = NULL_TREE;
11314 tree exp;
11315
11316 if (TREE_CODE (fn) == ADDR_EXPR)
11317 {
11318 tree fndecl = TREE_OPERAND (fn, 0);
11319 if (TREE_CODE (fndecl) == FUNCTION_DECL
11320 && DECL_BUILT_IN (fndecl))
11321 {
11322 /* If last argument is __builtin_va_arg_pack (), arguments to this
11323 function are not finalized yet. Defer folding until they are. */
11324 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
11325 {
11326 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
11327 if (fndecl2
11328 && TREE_CODE (fndecl2) == FUNCTION_DECL
11329 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
11330 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
11331 return build_call_array_loc (loc, type, fn, n, argarray);
11332 }
11333 if (avoid_folding_inline_builtin (fndecl))
11334 return build_call_array_loc (loc, type, fn, n, argarray);
11335 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11336 {
11337 ret = targetm.fold_builtin (fndecl, n, argarray, false);
11338 if (ret)
11339 return ret;
11340
11341 return build_call_array_loc (loc, type, fn, n, argarray);
11342 }
11343 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
11344 {
11345 /* First try the transformations that don't require consing up
11346 an exp. */
11347 ret = fold_builtin_n (loc, fndecl, argarray, n, false);
11348 if (ret)
11349 return ret;
11350 }
11351
11352 /* If we got this far, we need to build an exp. */
11353 exp = build_call_array_loc (loc, type, fn, n, argarray);
11354 ret = fold_builtin_varargs (loc, fndecl, exp, false);
11355 return ret ? ret : exp;
11356 }
11357 }
11358
11359 return build_call_array_loc (loc, type, fn, n, argarray);
11360 }
11361
11362 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
11363 along with N new arguments specified as the "..." parameters. SKIP
11364 is the number of arguments in EXP to be omitted. This function is used
11365 to do varargs-to-varargs transformations. */
11366
11367 static tree
11368 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
11369 {
11370 va_list ap;
11371 tree t;
11372
11373 va_start (ap, n);
11374 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
11375 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
11376 va_end (ap);
11377
11378 return t;
11379 }
11380
11381 /* Validate a single argument ARG against a tree code CODE representing
11382 a type. */
11383
11384 static bool
11385 validate_arg (const_tree arg, enum tree_code code)
11386 {
11387 if (!arg)
11388 return false;
11389 else if (code == POINTER_TYPE)
11390 return POINTER_TYPE_P (TREE_TYPE (arg));
11391 else if (code == INTEGER_TYPE)
11392 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
11393 return code == TREE_CODE (TREE_TYPE (arg));
11394 }
11395
11396 /* This function validates the types of a function call argument list
11397 against a specified list of tree_codes. If the last specifier is a 0,
11398 that represents an ellipsis; otherwise the last specifier must be a
11399 VOID_TYPE.
11400
11401 This is the GIMPLE version of validate_arglist. Eventually we want to
11402 completely convert builtins.c to work from GIMPLEs and the tree based
11403 validate_arglist will then be removed. */
11404
11405 bool
11406 validate_gimple_arglist (const_gimple call, ...)
11407 {
11408 enum tree_code code;
11409 bool res = false;
11410 va_list ap;
11411 const_tree arg;
11412 size_t i;
11413
11414 va_start (ap, call);
11415 i = 0;
11416
11417 do
11418 {
11419 code = (enum tree_code) va_arg (ap, int);
11420 switch (code)
11421 {
11422 case 0:
11423 /* This signifies an ellipsis; any further arguments are all OK. */
11424 res = true;
11425 goto end;
11426 case VOID_TYPE:
11427 /* This signifies an endlink. If no arguments remain, return
11428 true; otherwise return false. */
11429 res = (i == gimple_call_num_args (call));
11430 goto end;
11431 default:
11432 /* If no parameters remain or the parameter's code does not
11433 match the specified code, return false. Otherwise continue
11434 checking any remaining arguments. */
11435 arg = gimple_call_arg (call, i++);
11436 if (!validate_arg (arg, code))
11437 goto end;
11438 break;
11439 }
11440 }
11441 while (1);
11442
11443 /* We need gotos here so that every exit path reaches the single
11444 va_end call below. */
11445 end: ;
11446 va_end (ap);
11447
11448 return res;
11449 }
11450
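/* Editor's usage sketch (not GCC source): a fixed-arity check ends in
   VOID_TYPE, while a trailing 0 accepts any further arguments:

       validate_gimple_arglist (call, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE);
       validate_gimple_arglist (call, POINTER_TYPE, 0);
*/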
11451 /* Default target-specific builtin expander that does nothing. */
11452
11453 rtx
11454 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
11455 rtx target ATTRIBUTE_UNUSED,
11456 rtx subtarget ATTRIBUTE_UNUSED,
11457 enum machine_mode mode ATTRIBUTE_UNUSED,
11458 int ignore ATTRIBUTE_UNUSED)
11459 {
11460 return NULL_RTX;
11461 }
11462
11463 /* Returns true if EXP represents data that would potentially reside
11464 in a readonly section. */
11465
11466 static bool
11467 readonly_data_expr (tree exp)
11468 {
11469 STRIP_NOPS (exp);
11470
11471 if (TREE_CODE (exp) != ADDR_EXPR)
11472 return false;
11473
11474 exp = get_base_address (TREE_OPERAND (exp, 0));
11475 if (!exp)
11476 return false;
11477
11478 /* Make sure we call decl_readonly_section only for trees it
11479 can handle (since it returns true for everything it doesn't
11480 understand). */
11481 if (TREE_CODE (exp) == STRING_CST
11482 || TREE_CODE (exp) == CONSTRUCTOR
11483 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
11484 return decl_readonly_section (exp, 0);
11485 else
11486 return false;
11487 }
11488
11489 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
11490 to the call, and TYPE is its return type.
11491
11492 Return NULL_TREE if no simplification was possible, otherwise return the
11493 simplified form of the call as a tree.
11494
11495 The simplified form may be a constant or other expression which
11496 computes the same value, but in a more efficient manner (including
11497 calls to other builtin functions).
11498
11499 The call may contain arguments which need to be evaluated, but
11500 which are not useful to determine the result of the call. In
11501 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11502 COMPOUND_EXPR will be an argument which must be evaluated.
11503 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11504 COMPOUND_EXPR in the chain will contain the tree for the simplified
11505 form of the builtin function call. */
11506
11507 static tree
11508 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
11509 {
11510 if (!validate_arg (s1, POINTER_TYPE)
11511 || !validate_arg (s2, POINTER_TYPE))
11512 return NULL_TREE;
11513 else
11514 {
11515 tree fn;
11516 const char *p1, *p2;
11517
11518 p2 = c_getstr (s2);
11519 if (p2 == NULL)
11520 return NULL_TREE;
11521
11522 p1 = c_getstr (s1);
11523 if (p1 != NULL)
11524 {
11525 const char *r = strstr (p1, p2);
11526 tree tem;
11527
11528 if (r == NULL)
11529 return build_int_cst (TREE_TYPE (s1), 0);
11530
11531 /* Return an offset into the constant string argument. */
11532 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11533 return fold_convert_loc (loc, type, tem);
11534 }
11535
11536 /* The argument is const char *, and the result is char *, so we need
11537 a type conversion here to avoid a warning. */
11538 if (p2[0] == '\0')
11539 return fold_convert_loc (loc, type, s1);
11540
11541 if (p2[1] != '\0')
11542 return NULL_TREE;
11543
11544 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11545 if (!fn)
11546 return NULL_TREE;
11547
11548 /* New argument list transforming strstr(s1, s2) to
11549 strchr(s1, s2[0]). */
11550 return build_call_expr_loc (loc, fn, 2, s1,
11551 build_int_cst (integer_type_node, p2[0]));
11552 }
11553 }
11554
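/* Editor's sketch (not GCC source) of the strstr folds above:

       strstr ("needle in hay", "in")  =>  "needle in hay" + 7
       strstr (s, "")                  =>  (char *) s
       strstr (s, "c")                 =>  strchr (s, 'c')

   Everything else is left to the library call. */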
11555 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11556 the call, and TYPE is its return type.
11557
11558 Return NULL_TREE if no simplification was possible, otherwise return the
11559 simplified form of the call as a tree.
11560
11561 The simplified form may be a constant or other expression which
11562 computes the same value, but in a more efficient manner (including
11563 calls to other builtin functions).
11564
11565 The call may contain arguments which need to be evaluated, but
11566 which are not useful to determine the result of the call. In
11567 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11568 COMPOUND_EXPR will be an argument which must be evaluated.
11569 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11570 COMPOUND_EXPR in the chain will contain the tree for the simplified
11571 form of the builtin function call. */
11572
11573 static tree
11574 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
11575 {
11576 if (!validate_arg (s1, POINTER_TYPE)
11577 || !validate_arg (s2, INTEGER_TYPE))
11578 return NULL_TREE;
11579 else
11580 {
11581 const char *p1;
11582
11583 if (TREE_CODE (s2) != INTEGER_CST)
11584 return NULL_TREE;
11585
11586 p1 = c_getstr (s1);
11587 if (p1 != NULL)
11588 {
11589 char c;
11590 const char *r;
11591 tree tem;
11592
11593 if (target_char_cast (s2, &c))
11594 return NULL_TREE;
11595
11596 r = strchr (p1, c);
11597
11598 if (r == NULL)
11599 return build_int_cst (TREE_TYPE (s1), 0);
11600
11601 /* Return an offset into the constant string argument. */
11602 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11603 return fold_convert_loc (loc, type, tem);
11604 }
11605 return NULL_TREE;
11606 }
11607 }
11608
11609 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11610 the call, and TYPE is its return type.
11611
11612 Return NULL_TREE if no simplification was possible, otherwise return the
11613 simplified form of the call as a tree.
11614
11615 The simplified form may be a constant or other expression which
11616 computes the same value, but in a more efficient manner (including
11617 calls to other builtin functions).
11618
11619 The call may contain arguments which need to be evaluated, but
11620 which are not useful to determine the result of the call. In
11621 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11622 COMPOUND_EXPR will be an argument which must be evaluated.
11623 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11624 COMPOUND_EXPR in the chain will contain the tree for the simplified
11625 form of the builtin function call. */
11626
11627 static tree
11628 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
11629 {
11630 if (!validate_arg (s1, POINTER_TYPE)
11631 || !validate_arg (s2, INTEGER_TYPE))
11632 return NULL_TREE;
11633 else
11634 {
11635 tree fn;
11636 const char *p1;
11637
11638 if (TREE_CODE (s2) != INTEGER_CST)
11639 return NULL_TREE;
11640
11641 p1 = c_getstr (s1);
11642 if (p1 != NULL)
11643 {
11644 char c;
11645 const char *r;
11646 tree tem;
11647
11648 if (target_char_cast (s2, &c))
11649 return NULL_TREE;
11650
11651 r = strrchr (p1, c);
11652
11653 if (r == NULL)
11654 return build_int_cst (TREE_TYPE (s1), 0);
11655
11656 /* Return an offset into the constant string argument. */
11657 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11658 return fold_convert_loc (loc, type, tem);
11659 }
11660
11661 if (! integer_zerop (s2))
11662 return NULL_TREE;
11663
11664 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11665 if (!fn)
11666 return NULL_TREE;
11667
11668 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11669 return build_call_expr_loc (loc, fn, 2, s1, s2);
11670 }
11671 }
11672
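/* Editor's sketch (not GCC source): besides compile-time evaluation of
   constant arguments, the only strrchr fold above is

       strrchr (s, '\0')  =>  strchr (s, '\0')

   which is valid because both return a pointer to the terminating NUL. */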
11673 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11674 to the call, and TYPE is its return type.
11675
11676 Return NULL_TREE if no simplification was possible, otherwise return the
11677 simplified form of the call as a tree.
11678
11679 The simplified form may be a constant or other expression which
11680 computes the same value, but in a more efficient manner (including
11681 calls to other builtin functions).
11682
11683 The call may contain arguments which need to be evaluated, but
11684 which are not useful to determine the result of the call. In
11685 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11686 COMPOUND_EXPR will be an argument which must be evaluated.
11687 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11688 COMPOUND_EXPR in the chain will contain the tree for the simplified
11689 form of the builtin function call. */
11690
11691 static tree
11692 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
11693 {
11694 if (!validate_arg (s1, POINTER_TYPE)
11695 || !validate_arg (s2, POINTER_TYPE))
11696 return NULL_TREE;
11697 else
11698 {
11699 tree fn;
11700 const char *p1, *p2;
11701
11702 p2 = c_getstr (s2);
11703 if (p2 == NULL)
11704 return NULL_TREE;
11705
11706 p1 = c_getstr (s1);
11707 if (p1 != NULL)
11708 {
11709 const char *r = strpbrk (p1, p2);
11710 tree tem;
11711
11712 if (r == NULL)
11713 return build_int_cst (TREE_TYPE (s1), 0);
11714
11715 /* Return an offset into the constant string argument. */
11716 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11717 return fold_convert_loc (loc, type, tem);
11718 }
11719
11720 if (p2[0] == '\0')
11721 /* strpbrk(x, "") == NULL.
11722 Evaluate and ignore s1 in case it had side-effects. */
11723 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11724
11725 if (p2[1] != '\0')
11726 return NULL_TREE; /* Really call strpbrk. */
11727
11728 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11729 if (!fn)
11730 return NULL_TREE;
11731
11732 /* New argument list transforming strpbrk(s1, s2) to
11733 strchr(s1, s2[0]). */
11734 return build_call_expr_loc (loc, fn, 2, s1,
11735 build_int_cst (integer_type_node, p2[0]));
11736 }
11737 }
11738
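/* Editor's sketch (not GCC source) of the strpbrk folds above:

       strpbrk (s, "")    =>  (char *) 0   (s still evaluated)
       strpbrk (s, "c")   =>  strchr (s, 'c')

   With both arguments constant, the result is computed at compile time. */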
11739 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11740 to the call.
11741
11742 Return NULL_TREE if no simplification was possible, otherwise return the
11743 simplified form of the call as a tree.
11744
11745 The simplified form may be a constant or other expression which
11746 computes the same value, but in a more efficient manner (including
11747 calls to other builtin functions).
11748
11749 The call may contain arguments which need to be evaluated, but
11750 which are not useful to determine the result of the call. In
11751 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11752 COMPOUND_EXPR will be an argument which must be evaluated.
11753 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11754 COMPOUND_EXPR in the chain will contain the tree for the simplified
11755 form of the builtin function call. */
11756
11757 tree
11758 fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src,
11759 tree len)
11760 {
11761 if (!validate_arg (dst, POINTER_TYPE)
11762 || !validate_arg (src, POINTER_TYPE))
11763 return NULL_TREE;
11764 else
11765 {
11766 const char *p = c_getstr (src);
11767
11768 /* If the string length is zero, return the dst parameter. */
11769 if (p && *p == '\0')
11770 return dst;
11771
11772 if (optimize_insn_for_speed_p ())
11773 {
11774 /* See if we can store by pieces into (dst + strlen(dst)). */
11775 tree newdst, call;
11776 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11777 tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
11778
11779 if (!strlen_fn || !memcpy_fn)
11780 return NULL_TREE;
11781
11782 /* If the length of the source string isn't computable, don't
11783 split strcat into strlen and memcpy. */
11784 if (! len)
11785 len = c_strlen (src, 1);
11786 if (! len || TREE_SIDE_EFFECTS (len))
11787 return NULL_TREE;
11788
11789 /* Stabilize the argument list. */
11790 dst = builtin_save_expr (dst);
11791
11792 /* Create strlen (dst). */
11793 newdst = build_call_expr_loc (loc, strlen_fn, 1, dst);
11794 /* Create (dst p+ strlen (dst)). */
11795
11796 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
11797 newdst = builtin_save_expr (newdst);
11798
11799 len = fold_convert_loc (loc, size_type_node, len);
11800 len = size_binop_loc (loc, PLUS_EXPR, len,
11801 build_int_cst (size_type_node, 1));
11802
11803 call = build_call_expr_loc (loc, memcpy_fn, 3, newdst, src, len);
11804 return build2 (COMPOUND_EXPR, TREE_TYPE (dst), call, dst);
11805 }
11806 return NULL_TREE;
11807 }
11808 }
11809
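/* Editor's sketch (not GCC source): when SRC has known length L and we
   are optimizing for speed, the strcat fold above produces, in effect,

       d = dst;
       p = d + strlen (d);
       memcpy (p, src, L + 1);   /* + 1 copies the terminating NUL */
       /* value of the whole expression is d */
*/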
11810 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11811 arguments to the call.
11812
11813 Return NULL_TREE if no simplification was possible, otherwise return the
11814 simplified form of the call as a tree.
11815
11816 The simplified form may be a constant or other expression which
11817 computes the same value, but in a more efficient manner (including
11818 calls to other builtin functions).
11819
11820 The call may contain arguments which need to be evaluated, but
11821 which are not useful to determine the result of the call. In
11822 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11823 COMPOUND_EXPR will be an argument which must be evaluated.
11824 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11825 COMPOUND_EXPR in the chain will contain the tree for the simplified
11826 form of the builtin function call. */
11827
11828 static tree
11829 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
11830 {
11831 if (!validate_arg (dst, POINTER_TYPE)
11832 || !validate_arg (src, POINTER_TYPE)
11833 || !validate_arg (len, INTEGER_TYPE))
11834 return NULL_TREE;
11835 else
11836 {
11837 const char *p = c_getstr (src);
11838
11839 /* If the requested length is zero, or the src parameter string
11840 length is zero, return the dst parameter. */
11841 if (integer_zerop (len) || (p && *p == '\0'))
11842 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11843
11844 /* If the requested len is greater than or equal to the string
11845 length, call strcat. */
11846 if (TREE_CODE (len) == INTEGER_CST && p
11847 && compare_tree_int (len, strlen (p)) >= 0)
11848 {
11849 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
11850
11851 /* If the replacement _DECL isn't initialized, don't do the
11852 transformation. */
11853 if (!fn)
11854 return NULL_TREE;
11855
11856 return build_call_expr_loc (loc, fn, 2, dst, src);
11857 }
11858 return NULL_TREE;
11859 }
11860 }
11861
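/* Editor's sketch (not GCC source): with p = "ab",

       strncat (dst, p, 0)  =>  dst               (src, len still evaluated)
       strncat (dst, p, 5)  =>  strcat (dst, p)   since 5 >= strlen (p)
*/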
11862 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11863 to the call.
11864
11865 Return NULL_TREE if no simplification was possible, otherwise return the
11866 simplified form of the call as a tree.
11867
11868 The simplified form may be a constant or other expression which
11869 computes the same value, but in a more efficient manner (including
11870 calls to other builtin functions).
11871
11872 The call may contain arguments which need to be evaluated, but
11873 which are not useful to determine the result of the call. In
11874 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11875 COMPOUND_EXPR will be an argument which must be evaluated.
11876 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11877 COMPOUND_EXPR in the chain will contain the tree for the simplified
11878 form of the builtin function call. */
11879
11880 static tree
11881 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11882 {
11883 if (!validate_arg (s1, POINTER_TYPE)
11884 || !validate_arg (s2, POINTER_TYPE))
11885 return NULL_TREE;
11886 else
11887 {
11888 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11889
11890 /* If both arguments are constants, evaluate at compile-time. */
11891 if (p1 && p2)
11892 {
11893 const size_t r = strspn (p1, p2);
11894 return build_int_cst (size_type_node, r);
11895 }
11896
11897 /* If either argument is "", the result is zero. */
11898 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11899 /* Evaluate and ignore both arguments in case either one has
11900 side-effects. */
11901 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11902 s1, s2);
11903 return NULL_TREE;
11904 }
11905 }
11906
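/* Editor's sketch (not GCC source) of the strspn folds above:

       strspn ("aab", "ab")  =>  (size_t) 3   at compile time
       strspn (s, "")        =>  (size_t) 0   (both arguments still evaluated)
*/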
11907 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11908 to the call.
11909
11910 Return NULL_TREE if no simplification was possible, otherwise return the
11911 simplified form of the call as a tree.
11912
11913 The simplified form may be a constant or other expression which
11914 computes the same value, but in a more efficient manner (including
11915 calls to other builtin functions).
11916
11917 The call may contain arguments which need to be evaluated, but
11918 which are not useful to determine the result of the call. In
11919 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11920 COMPOUND_EXPR will be an argument which must be evaluated.
11921 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11922 COMPOUND_EXPR in the chain will contain the tree for the simplified
11923 form of the builtin function call. */
11924
11925 static tree
11926 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11927 {
11928 if (!validate_arg (s1, POINTER_TYPE)
11929 || !validate_arg (s2, POINTER_TYPE))
11930 return NULL_TREE;
11931 else
11932 {
11933 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11934
11935 /* If both arguments are constants, evaluate at compile-time. */
11936 if (p1 && p2)
11937 {
11938 const size_t r = strcspn (p1, p2);
11939 return build_int_cst (size_type_node, r);
11940 }
11941
11942 /* If the first argument is "", the result is zero. */
11943 if (p1 && *p1 == '\0')
11944 {
11945 /* Evaluate and ignore argument s2 in case it has
11946 side-effects. */
11947 return omit_one_operand_loc (loc, size_type_node,
11948 size_zero_node, s2);
11949 }
11950
11951 /* If the second argument is "", return __builtin_strlen(s1). */
11952 if (p2 && *p2 == '\0')
11953 {
11954 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11955
11956 /* If the replacement _DECL isn't initialized, don't do the
11957 transformation. */
11958 if (!fn)
11959 return NULL_TREE;
11960
11961 return build_call_expr_loc (loc, fn, 1, s1);
11962 }
11963 return NULL_TREE;
11964 }
11965 }
11966
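/* Editor's sketch (not GCC source) of the strcspn folds above:

       strcspn ("abc", "c")  =>  (size_t) 2   at compile time
       strcspn ("", s)       =>  (size_t) 0   (s still evaluated)
       strcspn (s, "")       =>  strlen (s)
*/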
11967 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11968 to the call. IGNORE is true if the value returned
11969 by the builtin will be ignored. UNLOCKED is true if this is
11970 actually a call to fputs_unlocked. If LEN is non-NULL, it represents
11971 the known length of the string. Return NULL_TREE if no simplification
11972 was possible. */
11973
11974 tree
11975 fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
11976 bool ignore, bool unlocked, tree len)
11977 {
11978 /* If we're using an unlocked function, assume the other unlocked
11979 functions exist explicitly. */
11980 tree const fn_fputc = (unlocked
11981 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
11982 : builtin_decl_implicit (BUILT_IN_FPUTC));
11983 tree const fn_fwrite = (unlocked
11984 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
11985 : builtin_decl_implicit (BUILT_IN_FWRITE));
11986
11987 /* If the return value is used, don't do the transformation. */
11988 if (!ignore)
11989 return NULL_TREE;
11990
11991 /* Verify the arguments in the original call. */
11992 if (!validate_arg (arg0, POINTER_TYPE)
11993 || !validate_arg (arg1, POINTER_TYPE))
11994 return NULL_TREE;
11995
11996 if (! len)
11997 len = c_strlen (arg0, 0);
11998
11999 /* Get the length of the string passed to fputs. If the length
12000 can't be determined, punt. */
12001 if (!len
12002 || TREE_CODE (len) != INTEGER_CST)
12003 return NULL_TREE;
12004
12005 switch (compare_tree_int (len, 1))
12006 {
12007 case -1: /* length is 0, delete the call entirely. */
12008 return omit_one_operand_loc (loc, integer_type_node,
12009 integer_zero_node, arg1);
12010
12011 case 0: /* length is 1, call fputc. */
12012 {
12013 const char *p = c_getstr (arg0);
12014
12015 if (p != NULL)
12016 {
12017 if (fn_fputc)
12018 return build_call_expr_loc (loc, fn_fputc, 2,
12019 build_int_cst
12020 (integer_type_node, p[0]), arg1);
12021 else
12022 return NULL_TREE;
12023 }
12024 }
12025 /* FALLTHROUGH */
12026 case 1: /* length is greater than 1, call fwrite. */
12027 {
12028 /* If optimizing for size, keep fputs. */
12029 if (optimize_function_for_size_p (cfun))
12030 return NULL_TREE;
12031 /* New argument list transforming fputs(string, stream) to
12032 fwrite(string, 1, len, stream). */
12033 if (fn_fwrite)
12034 return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
12035 size_one_node, len, arg1);
12036 else
12037 return NULL_TREE;
12038 }
12039 default:
12040 gcc_unreachable ();
12041 }
12042 return NULL_TREE;
12043 }
12044
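/* Editor's sketch (not GCC source) of the fputs folds above, assuming
   the return value is ignored and the string argument is constant:

       fputs ("", f)    =>  0, call deleted (f still evaluated)
       fputs ("x", f)   =>  fputc ('x', f)
       fputs ("ab", f)  =>  fwrite ("ab", 1, 2, f)   (skipped at -Os)
*/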
12045 /* Fold the next_arg or va_start call EXP. Returns true if an error was
12046 produced, false otherwise. This is done so that we don't output the
12047 error or warning more than once. */
12048
12049 bool
12050 fold_builtin_next_arg (tree exp, bool va_start_p)
12051 {
12052 tree fntype = TREE_TYPE (current_function_decl);
12053 int nargs = call_expr_nargs (exp);
12054 tree arg;
12055 /* There is a good chance the current input_location points inside the
12056 definition of the va_start macro (perhaps on the token for
12057 builtin) in a system header, so warnings will not be emitted.
12058 Use the location in real source code. */
12059 source_location current_location =
12060 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
12061 NULL);
12062
12063 if (!stdarg_p (fntype))
12064 {
12065 error ("%<va_start%> used in function with fixed args");
12066 return true;
12067 }
12068
12069 if (va_start_p)
12070 {
12071 if (nargs != 2)
12072 {
12073 error ("wrong number of arguments to function %<va_start%>");
12074 return true;
12075 }
12076 arg = CALL_EXPR_ARG (exp, 1);
12077 }
12078 /* We use __builtin_va_start (ap, 0) or __builtin_next_arg (0)
12079 once we have checked the arguments and, if needed, issued a warning. */
12080 else
12081 {
12082 if (nargs == 0)
12083 {
12084 /* Evidently an out of date version of <stdarg.h>; can't validate
12085 va_start's second argument, but can still work as intended. */
12086 warning_at (current_location,
12087 OPT_Wvarargs,
12088 "%<__builtin_next_arg%> called without an argument");
12089 return true;
12090 }
12091 else if (nargs > 1)
12092 {
12093 error ("wrong number of arguments to function %<__builtin_next_arg%>");
12094 return true;
12095 }
12096 arg = CALL_EXPR_ARG (exp, 0);
12097 }
12098
12099 if (TREE_CODE (arg) == SSA_NAME)
12100 arg = SSA_NAME_VAR (arg);
12101
12102 /* We destructively modify the call to be __builtin_va_start (ap, 0)
12103 or __builtin_next_arg (0) the first time we see it, after checking
12104 the arguments and if needed issuing a warning. */
12105 if (!integer_zerop (arg))
12106 {
12107 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
12108
12109 /* Strip off all nops for the sake of the comparison. This
12110 is not quite the same as STRIP_NOPS. It does more.
12111 We must also strip off INDIRECT_EXPR for C++ reference
12112 parameters. */
12113 while (CONVERT_EXPR_P (arg)
12114 || TREE_CODE (arg) == INDIRECT_REF)
12115 arg = TREE_OPERAND (arg, 0);
12116 if (arg != last_parm)
12117 {
12118 /* FIXME: Sometimes with the tree optimizers we can end up with
12119 something that is not the last argument even though the user
12120 used the last argument. We just warn and set the arg to be the
12121 last argument, which means we may generate wrong code because
12122 of it. */
12123 warning_at (current_location,
12124 OPT_Wvarargs,
12125 "second parameter of %<va_start%> not last named argument");
12126 }
12127
12128 /* Undefined by C99 7.15.1.4p4 (va_start):
12129 "If the parameter parmN is declared with the register storage
12130 class, with a function or array type, or with a type that is
12131 not compatible with the type that results after application of
12132 the default argument promotions, the behavior is undefined."
12133 */
12134 else if (DECL_REGISTER (arg))
12135 {
12136 warning_at (current_location,
12137 OPT_Wvarargs,
12138 "undefined behaviour when second parameter of "
12139 "%<va_start%> is declared with %<register%> storage");
12140 }
12141
12142 /* We want to verify the second parameter just once before the tree
12143 optimizers are run and then avoid keeping it in the tree,
12144 as otherwise we could warn even for correct code like:
12145 void foo (int i, ...)
12146 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
12147 if (va_start_p)
12148 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
12149 else
12150 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
12151 }
12152 return false;
12153 }
12154
12155
12156 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
12157 ORIG may be null if this is a 2-argument call. We don't attempt to
12158 simplify calls with more than 3 arguments.
12159
12160 Return NULL_TREE if no simplification was possible, otherwise return the
12161 simplified form of the call as a tree. If IGNORED is true, it means that
12162 the caller does not use the returned value of the function. */
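/* As a sketch, with hypothetical arguments, the two folds are:

     sprintf (buf, "hello")    =>  strcpy (buf, "hello"), value 5
     sprintf (buf, "%s", src)  =>  strcpy (buf, src), value strlen (src)
                                   when that length is a constant

   Both require FMT to be a string literal.  */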
12163
12164 static tree
12165 fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
12166 tree orig, int ignored)
12167 {
12168 tree call, retval;
12169 const char *fmt_str = NULL;
12170
12171 /* Verify the required arguments in the original call. We deal with two
12172 types of sprintf() calls: 'sprintf (dest, fmt)' and
12173 'sprintf (dest, "%s", orig)'. */
12174 if (!validate_arg (dest, POINTER_TYPE)
12175 || !validate_arg (fmt, POINTER_TYPE))
12176 return NULL_TREE;
12177 if (orig && !validate_arg (orig, POINTER_TYPE))
12178 return NULL_TREE;
12179
12180 /* Check whether the format is a literal string constant. */
12181 fmt_str = c_getstr (fmt);
12182 if (fmt_str == NULL)
12183 return NULL_TREE;
12184
12185 call = NULL_TREE;
12186 retval = NULL_TREE;
12187
12188 if (!init_target_chars ())
12189 return NULL_TREE;
12190
12191 /* If the format doesn't contain % args or %%, use strcpy. */
12192 if (strchr (fmt_str, target_percent) == NULL)
12193 {
12194 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12195
12196 if (!fn)
12197 return NULL_TREE;
12198
12199 /* Don't optimize sprintf (buf, "abc", ptr++). */
12200 if (orig)
12201 return NULL_TREE;
12202
12203 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
12204 'format' is known to contain no % formats. */
12205 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
12206 if (!ignored)
12207 retval = build_int_cst (integer_type_node, strlen (fmt_str));
12208 }
12209
12210 /* If the format is "%s", use strcpy if the result isn't used. */
12211 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
12212 {
12213 tree fn;
12214 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12215
12216 if (!fn)
12217 return NULL_TREE;
12218
12219 /* Don't crash on sprintf (str1, "%s"). */
12220 if (!orig)
12221 return NULL_TREE;
12222
12223 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
12224 if (!ignored)
12225 {
12226 retval = c_strlen (orig, 1);
12227 if (!retval || TREE_CODE (retval) != INTEGER_CST)
12228 return NULL_TREE;
12229 }
12230 call = build_call_expr_loc (loc, fn, 2, dest, orig);
12231 }
12232
12233 if (call && retval)
12234 {
12235 retval = fold_convert_loc
12236 (loc, TREE_TYPE (TREE_TYPE (builtin_decl_implicit (BUILT_IN_SPRINTF))),
12237 retval);
12238 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
12239 }
12240 else
12241 return call;
12242 }
12243
12244 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
12245 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
12246 attempt to simplify calls with more than 4 arguments.
12247
12248 Return NULL_TREE if no simplification was possible, otherwise return the
12249 simplified form of the call as a tree. If IGNORED is true, it means that
12250 the caller does not use the returned value of the function. */
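/* As a sketch, with hypothetical sizes:

     snprintf (buf, 32, "abc")       =>  strcpy (buf, "abc"), value 3
     snprintf (buf, 32, "%s", "xy")  =>  strcpy (buf, "xy"), value 2

   snprintf (buf, 2, "abc") is left alone: the output would be
   truncated, so a strcpy replacement would not be equivalent.  */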
12251
12252 static tree
12253 fold_builtin_snprintf (location_t loc, tree dest, tree destsize, tree fmt,
12254 tree orig, int ignored)
12255 {
12256 tree call, retval;
12257 const char *fmt_str = NULL;
12258 unsigned HOST_WIDE_INT destlen;
12259
12260 /* Verify the required arguments in the original call. We deal with two
12261 types of snprintf() calls: 'snprintf (dest, cst, fmt)' and
12262 'snprintf (dest, cst, "%s", orig)'. */
12263 if (!validate_arg (dest, POINTER_TYPE)
12264 || !validate_arg (destsize, INTEGER_TYPE)
12265 || !validate_arg (fmt, POINTER_TYPE))
12266 return NULL_TREE;
12267 if (orig && !validate_arg (orig, POINTER_TYPE))
12268 return NULL_TREE;
12269
12270 if (!tree_fits_uhwi_p (destsize))
12271 return NULL_TREE;
12272
12273 /* Check whether the format is a literal string constant. */
12274 fmt_str = c_getstr (fmt);
12275 if (fmt_str == NULL)
12276 return NULL_TREE;
12277
12278 call = NULL_TREE;
12279 retval = NULL_TREE;
12280
12281 if (!init_target_chars ())
12282 return NULL_TREE;
12283
12284 destlen = tree_to_uhwi (destsize);
12285
12286 /* If the format doesn't contain % args or %%, use strcpy. */
12287 if (strchr (fmt_str, target_percent) == NULL)
12288 {
12289 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12290 size_t len = strlen (fmt_str);
12291
12292 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
12293 if (orig)
12294 return NULL_TREE;
12295
12296 /* We could expand this as
12297 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
12298 or to
12299 memcpy (str, fmt_with_nul_at_cstm1, cst);
12300 but in the former case that might increase code size
12301 and in the latter case grow .rodata section too much.
12302 So punt for now. */
12303 if (len >= destlen)
12304 return NULL_TREE;
12305
12306 if (!fn)
12307 return NULL_TREE;
12308
12309 /* Convert snprintf (str, cst, fmt) into strcpy (str, fmt) when
12310 'format' is known to contain no % formats and
12311 strlen (fmt) < cst. */
12312 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
12313
12314 if (!ignored)
12315 retval = build_int_cst (integer_type_node, strlen (fmt_str));
12316 }
12317
12318 /* If the format is "%s", use strcpy if the result isn't used. */
12319 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
12320 {
12321 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12322 unsigned HOST_WIDE_INT origlen;
12323
12324 /* Don't crash on snprintf (str1, cst, "%s"). */
12325 if (!orig)
12326 return NULL_TREE;
12327
12328 retval = c_strlen (orig, 1);
12329 if (!retval || !tree_fits_uhwi_p (retval))
12330 return NULL_TREE;
12331
12332 origlen = tree_to_uhwi (retval);
12333 /* We could expand this as
12334 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
12335 or to
12336 memcpy (str1, str2_with_nul_at_cstm1, cst);
12337 but in the former case that might increase code size
12338 and in the latter case grow .rodata section too much.
12339 So punt for now. */
12340 if (origlen >= destlen)
12341 return NULL_TREE;
12342
12343 /* Convert snprintf (str1, cst, "%s", str2) into
12344 strcpy (str1, str2) if strlen (str2) < cst. */
12345 if (!fn)
12346 return NULL_TREE;
12347
12348 call = build_call_expr_loc (loc, fn, 2, dest, orig);
12349
12350 if (ignored)
12351 retval = NULL_TREE;
12352 }
12353
12354 if (call && retval)
12355 {
12356 tree fn = builtin_decl_explicit (BUILT_IN_SNPRINTF);
12357 retval = fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fn)), retval);
12358 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
12359 }
12360 else
12361 return call;
12362 }
12363
12364 /* Expand a call EXP to __builtin_object_size. */
12365
12366 rtx
12367 expand_builtin_object_size (tree exp)
12368 {
12369 tree ost;
12370 int object_size_type;
12371 tree fndecl = get_callee_fndecl (exp);
12372
12373 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
12374 {
12375 error ("%Kfirst argument of %D must be a pointer, second integer constant",
12376 exp, fndecl);
12377 expand_builtin_trap ();
12378 return const0_rtx;
12379 }
12380
12381 ost = CALL_EXPR_ARG (exp, 1);
12382 STRIP_NOPS (ost);
12383
12384 if (TREE_CODE (ost) != INTEGER_CST
12385 || tree_int_cst_sgn (ost) < 0
12386 || compare_tree_int (ost, 3) > 0)
12387 {
12388 error ("%Klast argument of %D is not integer constant between 0 and 3",
12389 exp, fndecl);
12390 expand_builtin_trap ();
12391 return const0_rtx;
12392 }
12393
12394 object_size_type = tree_to_shwi (ost);
12395
12396 return object_size_type < 2 ? constm1_rtx : const0_rtx;
12397 }
12398
12399 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12400 FCODE is the BUILT_IN_* to use.
12401 Return NULL_RTX if we failed; the caller should emit a normal call,
12402 otherwise try to get the result in TARGET, if convenient (and in
12403 mode MODE if that's convenient). */
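/* The common rewrite performed here, as a sketch with hypothetical
   operands:

     __memcpy_chk (d, s, n, os)  =>  memcpy (d, s, n)

   when N is a compile-time constant not exceeding OS, or when OS is
   (size_t) -1, i.e. the object size could not be determined.  */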
12404
12405 static rtx
12406 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
12407 enum built_in_function fcode)
12408 {
12409 tree dest, src, len, size;
12410
12411 if (!validate_arglist (exp,
12412 POINTER_TYPE,
12413 fcode == BUILT_IN_MEMSET_CHK
12414 ? INTEGER_TYPE : POINTER_TYPE,
12415 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
12416 return NULL_RTX;
12417
12418 dest = CALL_EXPR_ARG (exp, 0);
12419 src = CALL_EXPR_ARG (exp, 1);
12420 len = CALL_EXPR_ARG (exp, 2);
12421 size = CALL_EXPR_ARG (exp, 3);
12422
12423 if (! tree_fits_uhwi_p (size))
12424 return NULL_RTX;
12425
12426 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
12427 {
12428 tree fn;
12429
12430 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
12431 {
12432 warning_at (tree_nonartificial_location (exp),
12433 0, "%Kcall to %D will always overflow destination buffer",
12434 exp, get_callee_fndecl (exp));
12435 return NULL_RTX;
12436 }
12437
12438 fn = NULL_TREE;
12439 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12440 mem{cpy,pcpy,move,set} is available. */
12441 switch (fcode)
12442 {
12443 case BUILT_IN_MEMCPY_CHK:
12444 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
12445 break;
12446 case BUILT_IN_MEMPCPY_CHK:
12447 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
12448 break;
12449 case BUILT_IN_MEMMOVE_CHK:
12450 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
12451 break;
12452 case BUILT_IN_MEMSET_CHK:
12453 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
12454 break;
12455 default:
12456 break;
12457 }
12458
12459 if (! fn)
12460 return NULL_RTX;
12461
12462 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
12463 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
12464 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12465 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12466 }
12467 else if (fcode == BUILT_IN_MEMSET_CHK)
12468 return NULL_RTX;
12469 else
12470 {
12471 unsigned int dest_align = get_pointer_alignment (dest);
12472
12473 /* If DEST is not a pointer type, call the normal function. */
12474 if (dest_align == 0)
12475 return NULL_RTX;
12476
12477 /* If SRC and DEST are the same (and not volatile), do nothing. */
12478 if (operand_equal_p (src, dest, 0))
12479 {
12480 tree expr;
12481
12482 if (fcode != BUILT_IN_MEMPCPY_CHK)
12483 {
12484 /* Evaluate and ignore LEN in case it has side-effects. */
12485 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
12486 return expand_expr (dest, target, mode, EXPAND_NORMAL);
12487 }
12488
12489 expr = fold_build_pointer_plus (dest, len);
12490 return expand_expr (expr, target, mode, EXPAND_NORMAL);
12491 }
12492
12493 /* __memmove_chk special case. */
12494 if (fcode == BUILT_IN_MEMMOVE_CHK)
12495 {
12496 unsigned int src_align = get_pointer_alignment (src);
12497
12498 if (src_align == 0)
12499 return NULL_RTX;
12500
12501 /* If src is categorized for a readonly section we can use
12502 normal __memcpy_chk. */
12503 if (readonly_data_expr (src))
12504 {
12505 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12506 if (!fn)
12507 return NULL_RTX;
12508 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
12509 dest, src, len, size);
12510 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
12511 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12512 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12513 }
12514 }
12515 return NULL_RTX;
12516 }
12517 }
12518
12519 /* Emit warning if a buffer overflow is detected at compile time. */
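/* E.g., with hypothetical sizes, __strcpy_chk (buf, "overflow!", 4)
   must write 10 bytes into a 4-byte object, so it is diagnosed.  */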
12520
12521 static void
12522 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
12523 {
12524 int is_strlen = 0;
12525 tree len, size;
12526 location_t loc = tree_nonartificial_location (exp);
12527
12528 switch (fcode)
12529 {
12530 case BUILT_IN_STRCPY_CHK:
12531 case BUILT_IN_STPCPY_CHK:
12532 /* For __strcat_chk the warning will be emitted only if overflowing
12533 by at least strlen (dest) + 1 bytes. */
12534 case BUILT_IN_STRCAT_CHK:
12535 len = CALL_EXPR_ARG (exp, 1);
12536 size = CALL_EXPR_ARG (exp, 2);
12537 is_strlen = 1;
12538 break;
12539 case BUILT_IN_STRNCAT_CHK:
12540 case BUILT_IN_STRNCPY_CHK:
12541 case BUILT_IN_STPNCPY_CHK:
12542 len = CALL_EXPR_ARG (exp, 2);
12543 size = CALL_EXPR_ARG (exp, 3);
12544 break;
12545 case BUILT_IN_SNPRINTF_CHK:
12546 case BUILT_IN_VSNPRINTF_CHK:
12547 len = CALL_EXPR_ARG (exp, 1);
12548 size = CALL_EXPR_ARG (exp, 3);
12549 break;
12550 default:
12551 gcc_unreachable ();
12552 }
12553
12554 if (!len || !size)
12555 return;
12556
12557 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
12558 return;
12559
12560 if (is_strlen)
12561 {
12562 len = c_strlen (len, 1);
12563 if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
12564 return;
12565 }
12566 else if (fcode == BUILT_IN_STRNCAT_CHK)
12567 {
12568 tree src = CALL_EXPR_ARG (exp, 1);
12569 if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
12570 return;
12571 src = c_strlen (src, 1);
12572 if (! src || ! tree_fits_uhwi_p (src))
12573 {
12574 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
12575 exp, get_callee_fndecl (exp));
12576 return;
12577 }
12578 else if (tree_int_cst_lt (src, size))
12579 return;
12580 }
12581 else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
12582 return;
12583
12584 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
12585 exp, get_callee_fndecl (exp));
12586 }
12587
12588 /* Emit warning if a buffer overflow is detected at compile time
12589 in __sprintf_chk/__vsprintf_chk calls. */
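/* E.g., with hypothetical arguments, __sprintf_chk (buf, 1, 4, "%s",
   "abcde") writes 5 characters plus the terminating NUL into a 4-byte
   object, so it is diagnosed.  */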
12590
12591 static void
12592 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
12593 {
12594 tree size, len, fmt;
12595 const char *fmt_str;
12596 int nargs = call_expr_nargs (exp);
12597
12598 /* Verify the required arguments in the original call. */
12599
12600 if (nargs < 4)
12601 return;
12602 size = CALL_EXPR_ARG (exp, 2);
12603 fmt = CALL_EXPR_ARG (exp, 3);
12604
12605 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
12606 return;
12607
12608 /* Check whether the format is a literal string constant. */
12609 fmt_str = c_getstr (fmt);
12610 if (fmt_str == NULL)
12611 return;
12612
12613 if (!init_target_chars ())
12614 return;
12615
12616 /* If the format doesn't contain % args or %%, we know its size. */
12617 if (strchr (fmt_str, target_percent) == 0)
12618 len = build_int_cstu (size_type_node, strlen (fmt_str));
12619 /* If the format is "%s" and the first ... argument is a string literal,
12620 we know the size too.  */
12621 else if (fcode == BUILT_IN_SPRINTF_CHK
12622 && strcmp (fmt_str, target_percent_s) == 0)
12623 {
12624 tree arg;
12625
12626 if (nargs < 5)
12627 return;
12628 arg = CALL_EXPR_ARG (exp, 4);
12629 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
12630 return;
12631
12632 len = c_strlen (arg, 1);
12633 if (!len || ! tree_fits_uhwi_p (len))
12634 return;
12635 }
12636 else
12637 return;
12638
12639 if (! tree_int_cst_lt (len, size))
12640 warning_at (tree_nonartificial_location (exp),
12641 0, "%Kcall to %D will always overflow destination buffer",
12642 exp, get_callee_fndecl (exp));
12643 }
12644
12645 /* Emit warning if a free is called with address of a variable. */
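/* E.g., with a hypothetical local:

     int i;
     free (&i);    diagnosed by -Wfree-nonheap-object below.  */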
12646
12647 static void
12648 maybe_emit_free_warning (tree exp)
12649 {
12650 tree arg = CALL_EXPR_ARG (exp, 0);
12651
12652 STRIP_NOPS (arg);
12653 if (TREE_CODE (arg) != ADDR_EXPR)
12654 return;
12655
12656 arg = get_base_address (TREE_OPERAND (arg, 0));
12657 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
12658 return;
12659
12660 if (SSA_VAR_P (arg))
12661 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
12662 "%Kattempt to free a non-heap object %qD", exp, arg);
12663 else
12664 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
12665 "%Kattempt to free a non-heap object", exp);
12666 }
12667
12668 /* Fold a call to __builtin_object_size with arguments PTR and OST,
12669 if possible. */
12670
12671 tree
12672 fold_builtin_object_size (tree ptr, tree ost)
12673 {
12674 unsigned HOST_WIDE_INT bytes;
12675 int object_size_type;
12676
12677 if (!validate_arg (ptr, POINTER_TYPE)
12678 || !validate_arg (ost, INTEGER_TYPE))
12679 return NULL_TREE;
12680
12681 STRIP_NOPS (ost);
12682
12683 if (TREE_CODE (ost) != INTEGER_CST
12684 || tree_int_cst_sgn (ost) < 0
12685 || compare_tree_int (ost, 3) > 0)
12686 return NULL_TREE;
12687
12688 object_size_type = tree_to_shwi (ost);
12689
12690 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
12691 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
12692 and (size_t) 0 for types 2 and 3. */
12693 if (TREE_SIDE_EFFECTS (ptr))
12694 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
12695
12696 if (TREE_CODE (ptr) == ADDR_EXPR)
12697 {
12698 bytes = compute_builtin_object_size (ptr, object_size_type);
12699 if (double_int_fits_to_tree_p (size_type_node,
12700 double_int::from_uhwi (bytes)))
12701 return build_int_cstu (size_type_node, bytes);
12702 }
12703 else if (TREE_CODE (ptr) == SSA_NAME)
12704 {
12705 /* If object size is not known yet, delay folding until
12706 later.  Maybe subsequent passes will help determine
12707 it. */
12708 bytes = compute_builtin_object_size (ptr, object_size_type);
12709 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
12710 && double_int_fits_to_tree_p (size_type_node,
12711 double_int::from_uhwi (bytes)))
12712 return build_int_cstu (size_type_node, bytes);
12713 }
12714
12715 return NULL_TREE;
12716 }
12717
12718 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12719 DEST, SRC, LEN, and SIZE are the arguments to the call.
12720 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
12721 code of the builtin. If MAXLEN is not NULL, it is maximum length
12722 passed as third argument. */
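/* For instance (a sketch; N and MAXLEN hypothetical), if LEN is a
   non-constant N whose value is known to be at most MAXLEN, then
   __memcpy_chk (d, s, n, 64) can still become memcpy (d, s, n)
   provided MAXLEN <= 64.  */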
12723
12724 tree
12725 fold_builtin_memory_chk (location_t loc, tree fndecl,
12726 tree dest, tree src, tree len, tree size,
12727 tree maxlen, bool ignore,
12728 enum built_in_function fcode)
12729 {
12730 tree fn;
12731
12732 if (!validate_arg (dest, POINTER_TYPE)
12733 || !validate_arg (src,
12734 (fcode == BUILT_IN_MEMSET_CHK
12735 ? INTEGER_TYPE : POINTER_TYPE))
12736 || !validate_arg (len, INTEGER_TYPE)
12737 || !validate_arg (size, INTEGER_TYPE))
12738 return NULL_TREE;
12739
12740 /* If SRC and DEST are the same (and not volatile), return DEST
12741 (resp. DEST+LEN for __mempcpy_chk). */
12742 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
12743 {
12744 if (fcode != BUILT_IN_MEMPCPY_CHK)
12745 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12746 dest, len);
12747 else
12748 {
12749 tree temp = fold_build_pointer_plus_loc (loc, dest, len);
12750 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
12751 }
12752 }
12753
12754 if (! tree_fits_uhwi_p (size))
12755 return NULL_TREE;
12756
12757 if (! integer_all_onesp (size))
12758 {
12759 if (! tree_fits_uhwi_p (len))
12760 {
12761 /* If LEN is not constant, try MAXLEN too.
12762 For MAXLEN only allow optimizing into non-_ocs function
12763 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12764 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
12765 {
12766 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
12767 {
12768 /* (void) __mempcpy_chk () can be optimized into
12769 (void) __memcpy_chk (). */
12770 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12771 if (!fn)
12772 return NULL_TREE;
12773
12774 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
12775 }
12776 return NULL_TREE;
12777 }
12778 }
12779 else
12780 maxlen = len;
12781
12782 if (tree_int_cst_lt (size, maxlen))
12783 return NULL_TREE;
12784 }
12785
12786 fn = NULL_TREE;
12787 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12788 mem{cpy,pcpy,move,set} is available. */
12789 switch (fcode)
12790 {
12791 case BUILT_IN_MEMCPY_CHK:
12792 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
12793 break;
12794 case BUILT_IN_MEMPCPY_CHK:
12795 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
12796 break;
12797 case BUILT_IN_MEMMOVE_CHK:
12798 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
12799 break;
12800 case BUILT_IN_MEMSET_CHK:
12801 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
12802 break;
12803 default:
12804 break;
12805 }
12806
12807 if (!fn)
12808 return NULL_TREE;
12809
12810 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12811 }
12812
12813 /* Fold a call to the __st[rp]cpy_chk builtin.
12814 DEST, SRC, and SIZE are the arguments to the call.
12815 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12816 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12817 strings passed as second argument. */
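/* As a sketch, with hypothetical operands:

     __strcpy_chk (d, "abc", 8)  =>  strcpy (d, "abc")
     __strcpy_chk (d, s, os)     =>  __memcpy_chk (d, s, len + 1, os)
                                     when c_strlen gives a non-constant LEN
     (void) __stpcpy_chk (...)   =>  (void) __strcpy_chk (...)  */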
12818
12819 tree
12820 fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
12821 tree src, tree size,
12822 tree maxlen, bool ignore,
12823 enum built_in_function fcode)
12824 {
12825 tree len, fn;
12826
12827 if (!validate_arg (dest, POINTER_TYPE)
12828 || !validate_arg (src, POINTER_TYPE)
12829 || !validate_arg (size, INTEGER_TYPE))
12830 return NULL_TREE;
12831
12832 /* If SRC and DEST are the same (and not volatile), return DEST. */
12833 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12834 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
12835
12836 if (! tree_fits_uhwi_p (size))
12837 return NULL_TREE;
12838
12839 if (! integer_all_onesp (size))
12840 {
12841 len = c_strlen (src, 1);
12842 if (! len || ! tree_fits_uhwi_p (len))
12843 {
12844 /* If LEN is not constant, try MAXLEN too.
12845 For MAXLEN only allow optimizing into non-_ocs function
12846 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12847 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
12848 {
12849 if (fcode == BUILT_IN_STPCPY_CHK)
12850 {
12851 if (! ignore)
12852 return NULL_TREE;
12853
12854 /* If return value of __stpcpy_chk is ignored,
12855 optimize into __strcpy_chk. */
12856 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
12857 if (!fn)
12858 return NULL_TREE;
12859
12860 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12861 }
12862
12863 if (! len || TREE_SIDE_EFFECTS (len))
12864 return NULL_TREE;
12865
12866 /* If c_strlen returned something, but not a constant,
12867 transform __strcpy_chk into __memcpy_chk. */
12868 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12869 if (!fn)
12870 return NULL_TREE;
12871
12872 len = fold_convert_loc (loc, size_type_node, len);
12873 len = size_binop_loc (loc, PLUS_EXPR, len,
12874 build_int_cst (size_type_node, 1));
12875 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12876 build_call_expr_loc (loc, fn, 4,
12877 dest, src, len, size));
12878 }
12879 }
12880 else
12881 maxlen = len;
12882
12883 if (! tree_int_cst_lt (maxlen, size))
12884 return NULL_TREE;
12885 }
12886
12887 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12888 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
12889 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
12890 if (!fn)
12891 return NULL_TREE;
12892
12893 return build_call_expr_loc (loc, fn, 2, dest, src);
12894 }
12895
12896 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
12897 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12898 length passed as third argument. IGNORE is true if return value can be
12899 ignored. FCODE is the BUILT_IN_* code of the builtin. */
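/* As a sketch, with hypothetical operands:

     (void) __stpncpy_chk (d, s, n, os)  =>  __strncpy_chk (d, s, n, os)
     __strncpy_chk (d, s, 4, 8)          =>  strncpy (d, s, 4)  */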
12900
12901 tree
12902 fold_builtin_stxncpy_chk (location_t loc, tree dest, tree src,
12903 tree len, tree size, tree maxlen, bool ignore,
12904 enum built_in_function fcode)
12905 {
12906 tree fn;
12907
12908 if (!validate_arg (dest, POINTER_TYPE)
12909 || !validate_arg (src, POINTER_TYPE)
12910 || !validate_arg (len, INTEGER_TYPE)
12911 || !validate_arg (size, INTEGER_TYPE))
12912 return NULL_TREE;
12913
12914 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
12915 {
12916 /* If return value of __stpncpy_chk is ignored,
12917 optimize into __strncpy_chk. */
12918 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
12919 if (fn)
12920 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
12921 }
12922
12923 if (! tree_fits_uhwi_p (size))
12924 return NULL_TREE;
12925
12926 if (! integer_all_onesp (size))
12927 {
12928 if (! tree_fits_uhwi_p (len))
12929 {
12930 /* If LEN is not constant, try MAXLEN too.
12931 For MAXLEN only allow optimizing into non-_ocs function
12932 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12933 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
12934 return NULL_TREE;
12935 }
12936 else
12937 maxlen = len;
12938
12939 if (tree_int_cst_lt (size, maxlen))
12940 return NULL_TREE;
12941 }
12942
12943 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
12944 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
12945 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
12946 if (!fn)
12947 return NULL_TREE;
12948
12949 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12950 }
12951
12952 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12953 are the arguments to the call. */
12954
12955 static tree
12956 fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
12957 tree src, tree size)
12958 {
12959 tree fn;
12960 const char *p;
12961
12962 if (!validate_arg (dest, POINTER_TYPE)
12963 || !validate_arg (src, POINTER_TYPE)
12964 || !validate_arg (size, INTEGER_TYPE))
12965 return NULL_TREE;
12966
12967 p = c_getstr (src);
12968 /* If the SRC parameter is "", return DEST. */
12969 if (p && *p == '\0')
12970 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12971
12972 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
12973 return NULL_TREE;
12974
12975 /* If __builtin_strcat_chk is used, assume strcat is available. */
12976 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
12977 if (!fn)
12978 return NULL_TREE;
12979
12980 return build_call_expr_loc (loc, fn, 2, dest, src);
12981 }
12982
12983 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12984 LEN, and SIZE. */
12985
12986 static tree
12987 fold_builtin_strncat_chk (location_t loc, tree fndecl,
12988 tree dest, tree src, tree len, tree size)
12989 {
12990 tree fn;
12991 const char *p;
12992
12993 if (!validate_arg (dest, POINTER_TYPE)
12994 || !validate_arg (src, POINTER_TYPE)
12995 || !validate_arg (len, INTEGER_TYPE)
12996 || !validate_arg (size, INTEGER_TYPE))
12997 return NULL_TREE;
12998
12999 p = c_getstr (src);
13000 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
13001 if (p && *p == '\0')
13002 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
13003 else if (integer_zerop (len))
13004 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
13005
13006 if (! tree_fits_uhwi_p (size))
13007 return NULL_TREE;
13008
13009 if (! integer_all_onesp (size))
13010 {
13011 tree src_len = c_strlen (src, 1);
13012 if (src_len
13013 && tree_fits_uhwi_p (src_len)
13014 && tree_fits_uhwi_p (len)
13015 && ! tree_int_cst_lt (len, src_len))
13016 {
13017 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
13018 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
13019 if (!fn)
13020 return NULL_TREE;
13021
13022 return build_call_expr_loc (loc, fn, 3, dest, src, size);
13023 }
13024 return NULL_TREE;
13025 }
13026
13027 /* If __builtin_strncat_chk is used, assume strncat is available. */
13028 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
13029 if (!fn)
13030 return NULL_TREE;
13031
13032 return build_call_expr_loc (loc, fn, 3, dest, src, len);
13033 }
13034
13035 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
13036 Return NULL_TREE if a normal call should be emitted rather than
13037 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
13038 or BUILT_IN_VSPRINTF_CHK. */
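/* The argument layout checked below is

     __sprintf_chk (dest, flag, os, fmt, ...)

   so e.g. the hypothetical __sprintf_chk (buf, 0, 16, "hi") can become
   sprintf (buf, "hi"), since strlen ("hi") < 16.  */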
13039
13040 static tree
13041 fold_builtin_sprintf_chk_1 (location_t loc, int nargs, tree *args,
13042 enum built_in_function fcode)
13043 {
13044 tree dest, size, len, fn, fmt, flag;
13045 const char *fmt_str;
13046
13047 /* Verify the required arguments in the original call. */
13048 if (nargs < 4)
13049 return NULL_TREE;
13050 dest = args[0];
13051 if (!validate_arg (dest, POINTER_TYPE))
13052 return NULL_TREE;
13053 flag = args[1];
13054 if (!validate_arg (flag, INTEGER_TYPE))
13055 return NULL_TREE;
13056 size = args[2];
13057 if (!validate_arg (size, INTEGER_TYPE))
13058 return NULL_TREE;
13059 fmt = args[3];
13060 if (!validate_arg (fmt, POINTER_TYPE))
13061 return NULL_TREE;
13062
13063 if (! tree_fits_uhwi_p (size))
13064 return NULL_TREE;
13065
13066 len = NULL_TREE;
13067
13068 if (!init_target_chars ())
13069 return NULL_TREE;
13070
13071 /* Check whether the format is a literal string constant. */
13072 fmt_str = c_getstr (fmt);
13073 if (fmt_str != NULL)
13074 {
13075 /* If the format doesn't contain % args or %%, we know the size. */
13076 if (strchr (fmt_str, target_percent) == 0)
13077 {
13078 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
13079 len = build_int_cstu (size_type_node, strlen (fmt_str));
13080 }
13081 /* If the format is "%s" and the first ... argument is a string literal,
13082 we know the size too. */
13083 else if (fcode == BUILT_IN_SPRINTF_CHK
13084 && strcmp (fmt_str, target_percent_s) == 0)
13085 {
13086 tree arg;
13087
13088 if (nargs == 5)
13089 {
13090 arg = args[4];
13091 if (validate_arg (arg, POINTER_TYPE))
13092 {
13093 len = c_strlen (arg, 1);
13094 if (! len || ! tree_fits_uhwi_p (len))
13095 len = NULL_TREE;
13096 }
13097 }
13098 }
13099 }
13100
13101 if (! integer_all_onesp (size))
13102 {
13103 if (! len || ! tree_int_cst_lt (len, size))
13104 return NULL_TREE;
13105 }
13106
13107 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
13108 or if format doesn't contain % chars or is "%s". */
13109 if (! integer_zerop (flag))
13110 {
13111 if (fmt_str == NULL)
13112 return NULL_TREE;
13113 if (strchr (fmt_str, target_percent) != NULL
13114 && strcmp (fmt_str, target_percent_s))
13115 return NULL_TREE;
13116 }
13117
13118 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
13119 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
13120 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
13121 if (!fn)
13122 return NULL_TREE;
13123
13124 return rewrite_call_expr_array (loc, nargs, args, 4, fn, 2, dest, fmt);
13125 }
13126
13127 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
13128 a normal call should be emitted rather than expanding the function
13129 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13130
13131 static tree
13132 fold_builtin_sprintf_chk (location_t loc, tree exp,
13133 enum built_in_function fcode)
13134 {
13135 return fold_builtin_sprintf_chk_1 (loc, call_expr_nargs (exp),
13136 CALL_EXPR_ARGP (exp), fcode);
13137 }
13138
13139 /* Fold a call EXP to __{,v}snprintf_chk having NARGS passed as ARGS.  Return
13140 NULL_TREE if a normal call should be emitted rather than expanding
13141 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13142 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13143 passed as second argument. */
13144
13145 static tree
13146 fold_builtin_snprintf_chk_1 (location_t loc, int nargs, tree *args,
13147 tree maxlen, enum built_in_function fcode)
13148 {
13149 tree dest, size, len, fn, fmt, flag;
13150 const char *fmt_str;
13151
13152 /* Verify the required arguments in the original call. */
13153 if (nargs < 5)
13154 return NULL_TREE;
13155 dest = args[0];
13156 if (!validate_arg (dest, POINTER_TYPE))
13157 return NULL_TREE;
13158 len = args[1];
13159 if (!validate_arg (len, INTEGER_TYPE))
13160 return NULL_TREE;
13161 flag = args[2];
13162 if (!validate_arg (flag, INTEGER_TYPE))
13163 return NULL_TREE;
13164 size = args[3];
13165 if (!validate_arg (size, INTEGER_TYPE))
13166 return NULL_TREE;
13167 fmt = args[4];
13168 if (!validate_arg (fmt, POINTER_TYPE))
13169 return NULL_TREE;
13170
13171 if (! tree_fits_uhwi_p (size))
13172 return NULL_TREE;
13173
13174 if (! integer_all_onesp (size))
13175 {
13176 if (! tree_fits_uhwi_p (len))
13177 {
13178 /* If LEN is not constant, try MAXLEN too.
13179 For MAXLEN only allow optimizing into non-_ocs function
13180 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13181 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
13182 return NULL_TREE;
13183 }
13184 else
13185 maxlen = len;
13186
13187 if (tree_int_cst_lt (size, maxlen))
13188 return NULL_TREE;
13189 }
13190
13191 if (!init_target_chars ())
13192 return NULL_TREE;
13193
13194 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13195 or if format doesn't contain % chars or is "%s". */
13196 if (! integer_zerop (flag))
13197 {
13198 fmt_str = c_getstr (fmt);
13199 if (fmt_str == NULL)
13200 return NULL_TREE;
13201 if (strchr (fmt_str, target_percent) != NULL
13202 && strcmp (fmt_str, target_percent_s))
13203 return NULL_TREE;
13204 }
13205
13206 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13207 available. */
13208 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
13209 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
13210 if (!fn)
13211 return NULL_TREE;
13212
13213 return rewrite_call_expr_array (loc, nargs, args, 5, fn, 3, dest, len, fmt);
13214 }
13215
13216 /* Fold a call EXP to __{,v}snprintf_chk.  Return NULL_TREE if
13217 a normal call should be emitted rather than expanding the function
13218 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13219 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13220 passed as second argument. */
13221
13222 static tree
13223 fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
13224 enum built_in_function fcode)
13225 {
13226 return fold_builtin_snprintf_chk_1 (loc, call_expr_nargs (exp),
13227 CALL_EXPR_ARGP (exp), maxlen, fcode);
13228 }
13229
13230 /* Builtins with folding operations that operate on "..." arguments
13231 need special handling; we need to store the arguments in a convenient
13232 data structure before attempting any folding. Fortunately there are
13233 only a few builtins that fall into this category. FNDECL is the
13234 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
13235 result of the function call is ignored. */
13236
13237 static tree
13238 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
13239 bool ignore ATTRIBUTE_UNUSED)
13240 {
13241 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
13242 tree ret = NULL_TREE;
13243
13244 switch (fcode)
13245 {
13246 case BUILT_IN_SPRINTF_CHK:
13247 case BUILT_IN_VSPRINTF_CHK:
13248 ret = fold_builtin_sprintf_chk (loc, exp, fcode);
13249 break;
13250
13251 case BUILT_IN_SNPRINTF_CHK:
13252 case BUILT_IN_VSNPRINTF_CHK:
13253 ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
13254 break;
13255
13256 case BUILT_IN_FPCLASSIFY:
13257 ret = fold_builtin_fpclassify (loc, exp);
13258 break;
13259
13260 default:
13261 break;
13262 }
13263 if (ret)
13264 {
13265 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
13266 SET_EXPR_LOCATION (ret, loc);
13267 TREE_NO_WARNING (ret) = 1;
13268 return ret;
13269 }
13270 return NULL_TREE;
13271 }
13272
13273 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
13274 FMT and ARG are the arguments to the call; we don't fold cases with
13275 more than 2 arguments, and ARG may be null if this is a 1-argument case.
13276
13277 Return NULL_TREE if no simplification was possible, otherwise return the
13278 simplified form of the call as a tree. FCODE is the BUILT_IN_*
13279 code of the function to be simplified. */
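/* The classic folds performed below, as a sketch:

     printf ("")         =>  0 (no output)
     printf ("x")        =>  putchar ('x')
     printf ("str\n")    =>  puts ("str")
     printf ("%s\n", s)  =>  puts (s)
     printf ("%c", c)    =>  putchar (c)

   All of them require that the call's value be ignored.  */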
13280
13281 static tree
13282 fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
13283 tree arg, bool ignore,
13284 enum built_in_function fcode)
13285 {
13286 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
13287 const char *fmt_str = NULL;
13288
13289 /* If the return value is used, don't do the transformation. */
13290 if (! ignore)
13291 return NULL_TREE;
13292
13293 /* Verify the required arguments in the original call. */
13294 if (!validate_arg (fmt, POINTER_TYPE))
13295 return NULL_TREE;
13296
13297 /* Check whether the format is a literal string constant. */
13298 fmt_str = c_getstr (fmt);
13299 if (fmt_str == NULL)
13300 return NULL_TREE;
13301
13302 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
13303 {
13304 /* If we're using an unlocked function, assume the other
13305 unlocked functions exist explicitly. */
13306 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
13307 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
13308 }
13309 else
13310 {
13311 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
13312 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
13313 }
13314
13315 if (!init_target_chars ())
13316 return NULL_TREE;
13317
13318 if (strcmp (fmt_str, target_percent_s) == 0
13319 || strchr (fmt_str, target_percent) == NULL)
13320 {
13321 const char *str;
13322
13323 if (strcmp (fmt_str, target_percent_s) == 0)
13324 {
13325 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
13326 return NULL_TREE;
13327
13328 if (!arg || !validate_arg (arg, POINTER_TYPE))
13329 return NULL_TREE;
13330
13331 str = c_getstr (arg);
13332 if (str == NULL)
13333 return NULL_TREE;
13334 }
13335 else
13336 {
13337 /* The format specifier doesn't contain any '%' characters. */
13338 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
13339 && arg)
13340 return NULL_TREE;
13341 str = fmt_str;
13342 }
13343
13344 /* If the string was "", printf does nothing. */
13345 if (str[0] == '\0')
13346 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
13347
13348 /* If the string has length of 1, call putchar. */
13349 if (str[1] == '\0')
13350 {
13351 /* Given printf("c"), (where c is any one character,)
13352 convert "c"[0] to an int and pass that to the replacement
13353 function. */
13354 newarg = build_int_cst (integer_type_node, str[0]);
13355 if (fn_putchar)
13356 call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
13357 }
13358 else
13359 {
13360 /* If the string was "string\n", call puts("string"). */
13361 size_t len = strlen (str);
13362 if ((unsigned char)str[len - 1] == target_newline
13363 && (size_t) (int) len == len
13364 && (int) len > 0)
13365 {
13366 char *newstr;
13367 tree offset_node, string_cst;
13368
13369 /* Create a NUL-terminated string that's one char shorter
13370 than the original, stripping off the trailing '\n'. */
13371 newarg = build_string_literal (len, str);
13372 string_cst = string_constant (newarg, &offset_node);
13373 gcc_checking_assert (string_cst
13374 && (TREE_STRING_LENGTH (string_cst)
13375 == (int) len)
13376 && integer_zerop (offset_node)
13377 && (unsigned char)
13378 TREE_STRING_POINTER (string_cst)[len - 1]
13379 == target_newline);
13380 /* build_string_literal creates a new STRING_CST,
13381 modify it in place to avoid double copying. */
13382 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
13383 newstr[len - 1] = '\0';
13384 if (fn_puts)
13385 call = build_call_expr_loc (loc, fn_puts, 1, newarg);
13386 }
13387 else
13388 /* We'd like to arrange to call fputs(string,stdout) here,
13389 but we need stdout and don't have a way to get it yet. */
13390 return NULL_TREE;
13391 }
13392 }
13393
13394 /* The other optimizations can be done only on the non-va_list variants. */
13395 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
13396 return NULL_TREE;
13397
13398 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
13399 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
13400 {
13401 if (!arg || !validate_arg (arg, POINTER_TYPE))
13402 return NULL_TREE;
13403 if (fn_puts)
13404 call = build_call_expr_loc (loc, fn_puts, 1, arg);
13405 }
13406
13407 /* If the format specifier was "%c", call __builtin_putchar(arg). */
13408 else if (strcmp (fmt_str, target_percent_c) == 0)
13409 {
13410 if (!arg || !validate_arg (arg, INTEGER_TYPE))
13411 return NULL_TREE;
13412 if (fn_putchar)
13413 call = build_call_expr_loc (loc, fn_putchar, 1, arg);
13414 }
13415
13416 if (!call)
13417 return NULL_TREE;
13418
13419 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
13420 }
13421
13422 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
13423 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
13424 more than 3 arguments, and ARG may be null in the 2-argument case.
13425
13426 Return NULL_TREE if no simplification was possible, otherwise return the
13427 simplified form of the call as a tree. FCODE is the BUILT_IN_*
13428 code of the function to be simplified. */
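/* The folds performed below, as a sketch:

     fprintf (fp, "")       =>  0 (if FP has no side-effects)
     fprintf (fp, "str")    =>  fputs ("str", fp)
     fprintf (fp, "%s", s)  =>  fputs (s, fp)
     fprintf (fp, "%c", c)  =>  fputc (c, fp)

   As with printf above, the call's value must be ignored.  */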
13429
13430 static tree
13431 fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
13432 tree fmt, tree arg, bool ignore,
13433 enum built_in_function fcode)
13434 {
13435 tree fn_fputc, fn_fputs, call = NULL_TREE;
13436 const char *fmt_str = NULL;
13437
13438 /* If the return value is used, don't do the transformation. */
13439 if (! ignore)
13440 return NULL_TREE;
13441
13442 /* Verify the required arguments in the original call. */
13443 if (!validate_arg (fp, POINTER_TYPE))
13444 return NULL_TREE;
13445 if (!validate_arg (fmt, POINTER_TYPE))
13446 return NULL_TREE;
13447
13448 /* Check whether the format is a literal string constant. */
13449 fmt_str = c_getstr (fmt);
13450 if (fmt_str == NULL)
13451 return NULL_TREE;
13452
13453 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
13454 {
13455 /* If we're using an unlocked function, assume the other
13456 unlocked functions exist explicitly. */
13457 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
13458 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
13459 }
13460 else
13461 {
13462 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
13463 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
13464 }
13465
13466 if (!init_target_chars ())
13467 return NULL_TREE;
13468
13469 /* If the format doesn't contain % args or %%, use fputs.  */
13470 if (strchr (fmt_str, target_percent) == NULL)
13471 {
13472 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
13473 && arg)
13474 return NULL_TREE;
13475
13476 /* If the format specifier was "", fprintf does nothing. */
13477 if (fmt_str[0] == '\0')
13478 {
13479 /* If FP has side-effects, just wait until gimplification is
13480 done. */
13481 if (TREE_SIDE_EFFECTS (fp))
13482 return NULL_TREE;
13483
13484 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
13485 }
13486
13487 /* When "string" doesn't contain %, replace all cases of
13488 fprintf (fp, string) with fputs (string, fp). The fputs
13489 builtin will take care of special cases like length == 1. */
13490 if (fn_fputs)
13491 call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
13492 }
13493
13494 /* The other optimizations can be done only on the non-va_list variants. */
13495 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
13496 return NULL_TREE;
13497
13498 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
13499 else if (strcmp (fmt_str, target_percent_s) == 0)
13500 {
13501 if (!arg || !validate_arg (arg, POINTER_TYPE))
13502 return NULL_TREE;
13503 if (fn_fputs)
13504 call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
13505 }
13506
13507 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
13508 else if (strcmp (fmt_str, target_percent_c) == 0)
13509 {
13510 if (!arg || !validate_arg (arg, INTEGER_TYPE))
13511 return NULL_TREE;
13512 if (fn_fputc)
13513 call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
13514 }
13515
13516 if (!call)
13517 return NULL_TREE;
13518 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
13519 }
13520
13521 /* Initialize format string characters in the target charset. */
13522
13523 static bool
13524 init_target_chars (void)
13525 {
13526 static bool init;
13527 if (!init)
13528 {
13529 target_newline = lang_hooks.to_target_charset ('\n');
13530 target_percent = lang_hooks.to_target_charset ('%');
13531 target_c = lang_hooks.to_target_charset ('c');
13532 target_s = lang_hooks.to_target_charset ('s');
13533 if (target_newline == 0 || target_percent == 0 || target_c == 0
13534 || target_s == 0)
13535 return false;
13536
13537 target_percent_c[0] = target_percent;
13538 target_percent_c[1] = target_c;
13539 target_percent_c[2] = '\0';
13540
13541 target_percent_s[0] = target_percent;
13542 target_percent_s[1] = target_s;
13543 target_percent_s[2] = '\0';
13544
13545 target_percent_s_newline[0] = target_percent;
13546 target_percent_s_newline[1] = target_s;
13547 target_percent_s_newline[2] = target_newline;
13548 target_percent_s_newline[3] = '\0';
13549
13550 init = true;
13551 }
13552 return true;
13553 }
13554
13555 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
13556 and no overflow/underflow occurred. INEXACT is true if M was not
13557 exactly calculated. TYPE is the tree type for the result. This
13558 function assumes that you cleared the MPFR flags and then
13559 calculated M, so that any flag set before entering this function
13560 was raised by that calculation.  Return NULL_TREE if any checks fail.  */
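/* The expected calling pattern, mirroring do_mpfr_arg1 below:

     mpfr_clear_flags ();
     inexact = mpfr_sin (m, m, GMP_RNDN);
     result = do_mpfr_ckconv (m, type, inexact);  */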
13561
13562 static tree
13563 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
13564 {
13565 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13566 overflow/underflow occurred. If -frounding-math, proceed iff the
13567 result of calling FUNC was exact. */
13568 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
13569 && (!flag_rounding_math || !inexact))
13570 {
13571 REAL_VALUE_TYPE rr;
13572
13573 real_from_mpfr (&rr, m, type, GMP_RNDN);
13574 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
13575 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13576 but the mpfr_t is not, then we underflowed in the
13577 conversion. */
13578 if (real_isfinite (&rr)
13579 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
13580 {
13581 REAL_VALUE_TYPE rmode;
13582
13583 real_convert (&rmode, TYPE_MODE (type), &rr);
13584 /* Proceed iff the specified mode can hold the value. */
13585 if (real_identical (&rmode, &rr))
13586 return build_real (type, rmode);
13587 }
13588 }
13589 return NULL_TREE;
13590 }
13591
13592 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
13593 number and no overflow/underflow occurred. INEXACT is true if M
13594 was not exactly calculated. TYPE is the tree type for the result.
13595 This function assumes that you cleared the MPFR flags and then
13596 calculated M, so that any flag set before entering this function
13597 was raised by that calculation.  Return NULL_TREE if any checks
13598 fail; if FORCE_CONVERT is true, bypass the checks.  */
13599
13600 static tree
13601 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
13602 {
13603 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13604 overflow/underflow occurred. If -frounding-math, proceed iff the
13605 result of calling FUNC was exact. */
13606 if (force_convert
13607 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
13608 && !mpfr_overflow_p () && !mpfr_underflow_p ()
13609 && (!flag_rounding_math || !inexact)))
13610 {
13611 REAL_VALUE_TYPE re, im;
13612
13613 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
13614 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
13615 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
13616 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13617 but the mpfr_t is not, then we underflowed in the
13618 conversion. */
13619 if (force_convert
13620 || (real_isfinite (&re) && real_isfinite (&im)
13621 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
13622 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
13623 {
13624 REAL_VALUE_TYPE re_mode, im_mode;
13625
13626 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
13627 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
13628 /* Proceed iff the specified mode can hold the value. */
13629 if (force_convert
13630 || (real_identical (&re_mode, &re)
13631 && real_identical (&im_mode, &im)))
13632 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
13633 build_real (TREE_TYPE (type), im_mode));
13634 }
13635 }
13636 return NULL_TREE;
13637 }
13638
13639 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
13640 FUNC on it and return the resulting value as a tree with type TYPE.
13641 If MIN and/or MAX are not NULL, then the supplied ARG must be
13642 within those bounds. If INCLUSIVE is true, then MIN/MAX are
13643 acceptable values, otherwise they are not. The mpfr precision is
13644 set to the precision of TYPE. We assume that function FUNC returns
13645 zero if the result could be calculated exactly within the requested
13646 precision. */
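/* For instance, folding sqrt (4.0) reaches here as (a sketch; the
   actual caller is elsewhere in this file)

     do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)

   and yields the REAL_CST 2.0.  */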
13647
13648 static tree
13649 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
13650 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
13651 bool inclusive)
13652 {
13653 tree result = NULL_TREE;
13654
13655 STRIP_NOPS (arg);
13656
13657 /* To proceed, MPFR must exactly represent the target floating point
13658 format, which only happens when the target base equals two. */
13659 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13660 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
13661 {
13662 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13663
13664 if (real_isfinite (ra)
13665 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
13666 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
13667 {
13668 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13669 const int prec = fmt->p;
13670 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13671 int inexact;
13672 mpfr_t m;
13673
13674 mpfr_init2 (m, prec);
13675 mpfr_from_real (m, ra, GMP_RNDN);
13676 mpfr_clear_flags ();
13677 inexact = func (m, m, rnd);
13678 result = do_mpfr_ckconv (m, type, inexact);
13679 mpfr_clear (m);
13680 }
13681 }
13682
13683 return result;
13684 }
13685
13686 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
13687 FUNC on it and return the resulting value as a tree with type TYPE.
13688 The mpfr precision is set to the precision of TYPE. We assume that
13689 function FUNC returns zero if the result could be calculated
13690 exactly within the requested precision. */
13691
13692 static tree
13693 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
13694 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13695 {
13696 tree result = NULL_TREE;
13697
13698 STRIP_NOPS (arg1);
13699 STRIP_NOPS (arg2);
13700
13701 /* To proceed, MPFR must exactly represent the target floating point
13702 format, which only happens when the target base equals two. */
13703 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13704 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13705 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13706 {
13707 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13708 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13709
13710 if (real_isfinite (ra1) && real_isfinite (ra2))
13711 {
13712 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13713 const int prec = fmt->p;
13714 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13715 int inexact;
13716 mpfr_t m1, m2;
13717
13718 mpfr_inits2 (prec, m1, m2, NULL);
13719 mpfr_from_real (m1, ra1, GMP_RNDN);
13720 mpfr_from_real (m2, ra2, GMP_RNDN);
13721 mpfr_clear_flags ();
13722 inexact = func (m1, m1, m2, rnd);
13723 result = do_mpfr_ckconv (m1, type, inexact);
13724 mpfr_clears (m1, m2, NULL);
13725 }
13726 }
13727
13728 return result;
13729 }
13730
13731 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
13732 FUNC on it and return the resulting value as a tree with type TYPE.
13733 The mpfr precision is set to the precision of TYPE. We assume that
13734 function FUNC returns zero if the result could be calculated
13735 exactly within the requested precision. */
13736
13737 static tree
13738 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
13739 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13740 {
13741 tree result = NULL_TREE;
13742
13743 STRIP_NOPS (arg1);
13744 STRIP_NOPS (arg2);
13745 STRIP_NOPS (arg3);
13746
13747 /* To proceed, MPFR must exactly represent the target floating point
13748 format, which only happens when the target base equals two. */
13749 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13750 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13751 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
13752 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
13753 {
13754 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13755 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13756 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
13757
13758 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
13759 {
13760 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13761 const int prec = fmt->p;
13762 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13763 int inexact;
13764 mpfr_t m1, m2, m3;
13765
13766 mpfr_inits2 (prec, m1, m2, m3, NULL);
13767 mpfr_from_real (m1, ra1, GMP_RNDN);
13768 mpfr_from_real (m2, ra2, GMP_RNDN);
13769 mpfr_from_real (m3, ra3, GMP_RNDN);
13770 mpfr_clear_flags ();
13771 inexact = func (m1, m1, m2, m3, rnd);
13772 result = do_mpfr_ckconv (m1, type, inexact);
13773 mpfr_clears (m1, m2, m3, NULL);
13774 }
13775 }
13776
13777 return result;
13778 }
13779
13780 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
13781 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
13782 If ARG_SINP and ARG_COSP are NULL then the result is returned
13783 as a complex value.
13784 The type is taken from the type of ARG and is used for setting the
13785 precision of the calculation and results. */
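/* Two uses, as a sketch (the callers are elsewhere in this file):

     sincos (x, &s, &c) -- ARG_SINP and ARG_COSP are the two pointers;
     cexpi (x)          -- both are NULL, so cos (x) + i*sin (x) comes
                           back as a single complex constant.  */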
13786
13787 static tree
13788 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
13789 {
13790 tree const type = TREE_TYPE (arg);
13791 tree result = NULL_TREE;
13792
13793 STRIP_NOPS (arg);
13794
13795 /* To proceed, MPFR must exactly represent the target floating point
13796 format, which only happens when the target base equals two. */
13797 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13798 && TREE_CODE (arg) == REAL_CST
13799 && !TREE_OVERFLOW (arg))
13800 {
13801 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13802
13803 if (real_isfinite (ra))
13804 {
13805 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13806 const int prec = fmt->p;
13807 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13808 tree result_s, result_c;
13809 int inexact;
13810 mpfr_t m, ms, mc;
13811
13812 mpfr_inits2 (prec, m, ms, mc, NULL);
13813 mpfr_from_real (m, ra, GMP_RNDN);
13814 mpfr_clear_flags ();
13815 inexact = mpfr_sin_cos (ms, mc, m, rnd);
13816 result_s = do_mpfr_ckconv (ms, type, inexact);
13817 result_c = do_mpfr_ckconv (mc, type, inexact);
13818 mpfr_clears (m, ms, mc, NULL);
13819 if (result_s && result_c)
13820 {
13821 /* If we are to return the result as a complex value, do so.  */
13822 if (!arg_sinp && !arg_cosp)
13823 return build_complex (build_complex_type (type),
13824 result_c, result_s);
13825
13826 /* Dereference the sin/cos pointer arguments. */
13827 arg_sinp = build_fold_indirect_ref (arg_sinp);
13828 arg_cosp = build_fold_indirect_ref (arg_cosp);
13829 /* Proceed if valid pointer types were passed in.  */
13830 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
13831 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
13832 {
13833 /* Set the values. */
13834 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
13835 result_s);
13836 TREE_SIDE_EFFECTS (result_s) = 1;
13837 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
13838 result_c);
13839 TREE_SIDE_EFFECTS (result_c) = 1;
13840 /* Combine the assignments into a compound expr. */
13841 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13842 result_s, result_c));
13843 }
13844 }
13845 }
13846 }
13847 return result;
13848 }
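
/* Standalone sketch of the sequence above (illustrative only; the
   helper name is hypothetical): one mpfr_sin_cos call yields both
   results, mirroring how a constant sincos or cexpi call is folded.

     #include <mpfr.h>

     void
     fold_sincos_like (double x, double *sinp, double *cosp)
     {
       mpfr_t m, ms, mc;

       mpfr_inits2 (53, m, ms, mc, (mpfr_ptr) 0);
       mpfr_set_d (m, x, GMP_RNDN);
       mpfr_sin_cos (ms, mc, m, GMP_RNDN);
       *sinp = mpfr_get_d (ms, GMP_RNDN);
       *cosp = mpfr_get_d (mc, GMP_RNDN);
       mpfr_clears (m, ms, mc, (mpfr_ptr) 0);
     }

   At the tree level the successful case instead builds the pair of
   MODIFY_EXPRs combined into the COMPOUND_EXPR seen above, or a
   COMPLEX_CST when no pointer arguments were supplied.  */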
13849
13850 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13851 two-argument mpfr order N Bessel function FUNC on them and return
13852 the resulting value as a tree with type TYPE. The mpfr precision
13853 is set to the precision of TYPE. We assume that function FUNC
13854 returns zero if the result could be calculated exactly within the
13855 requested precision. */
13856 static tree
13857 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
13858 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
13859 const REAL_VALUE_TYPE *min, bool inclusive)
13860 {
13861 tree result = NULL_TREE;
13862
13863 STRIP_NOPS (arg1);
13864 STRIP_NOPS (arg2);
13865
13866 /* To proceed, MPFR must exactly represent the target floating point
13867 format, which only happens when the target base equals two. */
13868 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13869 && tree_fits_shwi_p (arg1)
13870 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13871 {
13872 const HOST_WIDE_INT n = tree_to_shwi (arg1);
13873 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
13874
13875 if (n == (long)n
13876 && real_isfinite (ra)
13877 && (!min || real_compare (inclusive ? GE_EXPR : GT_EXPR, ra, min)))
13878 {
13879 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13880 const int prec = fmt->p;
13881 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13882 int inexact;
13883 mpfr_t m;
13884
13885 mpfr_init2 (m, prec);
13886 mpfr_from_real (m, ra, GMP_RNDN);
13887 mpfr_clear_flags ();
13888 inexact = func (m, n, m, rnd);
13889 result = do_mpfr_ckconv (m, type, inexact);
13890 mpfr_clear (m);
13891 }
13892 }
13893
13894 return result;
13895 }
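
/* Illustrative standalone version (hypothetical helper name; assumes
   MPFR is available): the same sequence for jn.  The MIN/INCLUSIVE
   parameters exist to guard domains such as the yn-style functions,
   whose argument must be positive.

     #include <mpfr.h>

     double
     fold_jn_like (long n, double x)
     {
       mpfr_t m;
       double d;

       mpfr_init2 (m, 53);
       mpfr_set_d (m, x, GMP_RNDN);
       mpfr_jn (m, n, m, GMP_RNDN);
       d = mpfr_get_d (m, GMP_RNDN);
       mpfr_clear (m);
       return d;
     }
*/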
13896
13897 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13898 *(ARG_QUO) to the quotient bits and return the remainder. The type
13899 is taken from the type of ARG0 and is used for setting the precision
13900 of the calculation and results. */
13901
13902 static tree
13903 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
13904 {
13905 tree const type = TREE_TYPE (arg0);
13906 tree result = NULL_TREE;
13907
13908 STRIP_NOPS (arg0);
13909 STRIP_NOPS (arg1);
13910
13911 /* To proceed, MPFR must exactly represent the target floating point
13912 format, which only happens when the target base equals two. */
13913 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13914 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
13915 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
13916 {
13917 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
13918 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
13919
13920 if (real_isfinite (ra0) && real_isfinite (ra1))
13921 {
13922 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13923 const int prec = fmt->p;
13924 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13925 tree result_rem;
13926 long integer_quo;
13927 mpfr_t m0, m1;
13928
13929 mpfr_inits2 (prec, m0, m1, NULL);
13930 mpfr_from_real (m0, ra0, GMP_RNDN);
13931 mpfr_from_real (m1, ra1, GMP_RNDN);
13932 mpfr_clear_flags ();
13933 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
13934 /* Remquo is independent of the rounding mode, so pass
13935 inexact=0 to do_mpfr_ckconv(). */
13936 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
13937 mpfr_clears (m0, m1, NULL);
13938 if (result_rem)
13939 {
13940 /* MPFR computes quo in the host's long, so it may
13941 return more bits in quo than the target int can hold
13942 if sizeof(host long) > sizeof(target int).  This can
13943 happen even for native compilers in LP64 mode.  In
13944 that case, reduce quo modulo 2**(INT_TYPE_SIZE - 1),
13945 i.e. keep only the low bits that fit in the target
13946 int, leaving one bit for the sign. */
13947 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
13948 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
13949
13950 /* Dereference the quo pointer argument. */
13951 arg_quo = build_fold_indirect_ref (arg_quo);
13952 /* Proceed iff a valid pointer type was passed in. */
13953 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
13954 {
13955 /* Set the value. */
13956 tree result_quo
13957 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
13958 build_int_cst (TREE_TYPE (arg_quo),
13959 integer_quo));
13960 TREE_SIDE_EFFECTS (result_quo) = 1;
13961 /* Combine the quo assignment with the rem. */
13962 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13963 result_quo, result_rem));
13964 }
13965 }
13966 }
13967 }
13968 return result;
13969 }
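
/* Standalone sketch of the folding above (illustrative; the helper
   name is made up), including the host-long to target-int narrowing
   of the quotient that the comment in the code describes:

     #include <limits.h>
     #include <mpfr.h>

     double
     fold_remquo_like (double x, double y, int *quop)
     {
       mpfr_t m0, m1;
       long q;
       double rem;

       mpfr_inits2 (53, m0, m1, (mpfr_ptr) 0);
       mpfr_set_d (m0, x, GMP_RNDN);
       mpfr_set_d (m1, y, GMP_RNDN);
       mpfr_remquo (m0, &q, m0, m1, GMP_RNDN);
       rem = mpfr_get_d (m0, GMP_RNDN);
       mpfr_clears (m0, m1, (mpfr_ptr) 0);
       if (sizeof (long) > sizeof (int))
         q %= (long) (1UL << (sizeof (int) * CHAR_BIT - 1));
       *quop = (int) q;
       return rem;
     }
*/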
13970
13971 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13972 resulting value as a tree with type TYPE. The mpfr precision is
13973 set to the precision of TYPE. We assume that this mpfr function
13974 returns zero if the result could be calculated exactly within the
13975 requested precision. In addition, the integer pointer represented
13976 by ARG_SG will be dereferenced and set to the appropriate signgam
13977 (-1,1) value. */
13978
13979 static tree
13980 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
13981 {
13982 tree result = NULL_TREE;
13983
13984 STRIP_NOPS (arg);
13985
13986 /* To proceed, MPFR must exactly represent the target floating point
13987 format, which only happens when the target base equals two. Also
13988 verify ARG is a constant and that ARG_SG is an int pointer. */
13989 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13990 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
13991 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
13992 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
13993 {
13994 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
13995
13996 /* In addition to NaN and Inf, the argument cannot be zero or a
13997 negative integer. */
13998 if (real_isfinite (ra)
13999 && ra->cl != rvc_zero
14000 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
14001 {
14002 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
14003 const int prec = fmt->p;
14004 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
14005 int inexact, sg;
14006 mpfr_t m;
14007 tree result_lg;
14008
14009 mpfr_init2 (m, prec);
14010 mpfr_from_real (m, ra, GMP_RNDN);
14011 mpfr_clear_flags ();
14012 inexact = mpfr_lgamma (m, &sg, m, rnd);
14013 result_lg = do_mpfr_ckconv (m, type, inexact);
14014 mpfr_clear (m);
14015 if (result_lg)
14016 {
14017 tree result_sg;
14018
14019 /* Dereference the arg_sg pointer argument. */
14020 arg_sg = build_fold_indirect_ref (arg_sg);
14021 /* Assign the signgam value into *arg_sg. */
14022 result_sg = fold_build2 (MODIFY_EXPR,
14023 TREE_TYPE (arg_sg), arg_sg,
14024 build_int_cst (TREE_TYPE (arg_sg), sg));
14025 TREE_SIDE_EFFECTS (result_sg) = 1;
14026 /* Combine the signgam assignment with the lgamma result. */
14027 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
14028 result_sg, result_lg));
14029 }
14030 }
14031 }
14032
14033 return result;
14034 }
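
/* Illustrative standalone equivalent (hypothetical helper name):
   mpfr_lgamma returns ln|gamma(x)| and stores the sign of gamma(x),
   which is exactly the lgamma_r/signgam contract folded above.

     #include <mpfr.h>

     double
     fold_lgamma_r_like (double x, int *signp)
     {
       mpfr_t m;
       double d;

       mpfr_init2 (m, 53);
       mpfr_set_d (m, x, GMP_RNDN);
       mpfr_lgamma (m, signp, m, GMP_RNDN);
       d = mpfr_get_d (m, GMP_RNDN);
       mpfr_clear (m);
       return d;
     }
*/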
14035
14036 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
14037 function FUNC on it and return the resulting value as a tree with
14038 type TYPE. The mpfr precision is set to the precision of TYPE. We
14039 assume that function FUNC returns zero if the result could be
14040 calculated exactly within the requested precision. */
14041
14042 static tree
14043 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
14044 {
14045 tree result = NULL_TREE;
14046
14047 STRIP_NOPS (arg);
14048
14049 /* To proceed, MPFR must exactly represent the target floating point
14050 format, which only happens when the target base equals two. */
14051 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
14052 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
14053 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
14054 {
14055 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
14056 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
14057
14058 if (real_isfinite (re) && real_isfinite (im))
14059 {
14060 const struct real_format *const fmt =
14061 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
14062 const int prec = fmt->p;
14063 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
14064 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
14065 int inexact;
14066 mpc_t m;
14067
14068 mpc_init2 (m, prec);
14069 mpfr_from_real (mpc_realref (m), re, rnd);
14070 mpfr_from_real (mpc_imagref (m), im, rnd);
14071 mpfr_clear_flags ();
14072 inexact = func (m, m, crnd);
14073 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
14074 mpc_clear (m);
14075 }
14076 }
14077
14078 return result;
14079 }
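
/* Standalone sketch of the MPC sequence above (illustrative only;
   assumes MPC is available and linking with -lmpc -lmpfr -lgmp).
   csqrt of a constant, with 53-bit precision standing in for double:

     #include <complex.h>
     #include <mpc.h>

     double _Complex
     fold_csqrt_like (double _Complex z)
     {
       mpc_t m;
       double re, im;

       mpc_init2 (m, 53);
       mpc_set_d_d (m, creal (z), cimag (z), MPC_RNDNN);
       mpc_sqrt (m, m, MPC_RNDNN);
       re = mpfr_get_d (mpc_realref (m), GMP_RNDN);
       im = mpfr_get_d (mpc_imagref (m), GMP_RNDN);
       mpc_clear (m);
       return re + im * I;
     }
*/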
14080
14081 /* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
14082 mpc function FUNC on them and return the resulting value as a tree
14083 with type TYPE. The mpfr precision is set to the precision of
14084 TYPE. We assume that function FUNC returns zero if the result
14085 could be calculated exactly within the requested precision. If
14086 DO_NONFINITE is true, then fold expressions containing Inf or NaN
14087 in the arguments and/or results. */
14088
14089 tree
14090 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
14091 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
14092 {
14093 tree result = NULL_TREE;
14094
14095 STRIP_NOPS (arg0);
14096 STRIP_NOPS (arg1);
14097
14098 /* To proceed, MPFR must exactly represent the target floating point
14099 format, which only happens when the target base equals two. */
14100 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
14101 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
14102 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
14103 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
14104 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
14105 {
14106 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
14107 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
14108 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
14109 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
14110
14111 if (do_nonfinite
14112 || (real_isfinite (re0) && real_isfinite (im0)
14113 && real_isfinite (re1) && real_isfinite (im1)))
14114 {
14115 const struct real_format *const fmt =
14116 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
14117 const int prec = fmt->p;
14118 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
14119 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
14120 int inexact;
14121 mpc_t m0, m1;
14122
14123 mpc_init2 (m0, prec);
14124 mpc_init2 (m1, prec);
14125 mpfr_from_real (mpc_realref (m0), re0, rnd);
14126 mpfr_from_real (mpc_imagref (m0), im0, rnd);
14127 mpfr_from_real (mpc_realref (m1), re1, rnd);
14128 mpfr_from_real (mpc_imagref (m1), im1, rnd);
14129 mpfr_clear_flags ();
14130 inexact = func (m0, m0, m1, crnd);
14131 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
14132 mpc_clear (m0);
14133 mpc_clear (m1);
14134 }
14135 }
14136
14137 return result;
14138 }
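
/* The corresponding standalone sketch for the binary case
   (illustrative; mpc_pow is the kind of function passed in, e.g.
   when folding a constant cpow):

     #include <complex.h>
     #include <mpc.h>

     double _Complex
     fold_cpow_like (double _Complex x, double _Complex y)
     {
       mpc_t m0, m1;
       double re, im;

       mpc_init2 (m0, 53);
       mpc_init2 (m1, 53);
       mpc_set_d_d (m0, creal (x), cimag (x), MPC_RNDNN);
       mpc_set_d_d (m1, creal (y), cimag (y), MPC_RNDNN);
       mpc_pow (m0, m0, m1, MPC_RNDNN);
       re = mpfr_get_d (mpc_realref (m0), GMP_RNDN);
       im = mpfr_get_d (mpc_imagref (m0), GMP_RNDN);
       mpc_clear (m0);
       mpc_clear (m1);
       return re + im * I;
     }

   DO_NONFINITE covers the case where folding may proceed even for
   Inf/NaN operands.  */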
14139
14140 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
14141 a normal call should be emitted rather than expanding the function
14142 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
14143
14144 static tree
14145 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
14146 {
14147 int nargs = gimple_call_num_args (stmt);
14148
14149 return fold_builtin_sprintf_chk_1 (gimple_location (stmt), nargs,
14150 (nargs > 0
14151 ? gimple_call_arg_ptr (stmt, 0)
14152 : &error_mark_node), fcode);
14153 }
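
/* Source-level illustration (hedged; the exact replacement is chosen
   by fold_builtin_sprintf_chk_1): when the format string contains no
   '%' and its length is known to fit in the object size, a call like

     __builtin___sprintf_chk (buf, 0,
                              __builtin_object_size (buf, 0), "hello");

   can be folded to the unchecked sprintf (buf, "hello"), which later
   folding may reduce further to a simple string copy.  */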
14154
14155 /* Fold a call STMT to __{,v}snprintf_chk. Return NULL_TREE if
14156 a normal call should be emitted rather than expanding the function
14157 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
14158 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is the maximum
14159 length passed as the second argument. */
14160
14161 tree
14162 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
14163 enum built_in_function fcode)
14164 {
14165 int nargs = gimple_call_num_args (stmt);
14166
14167 return fold_builtin_snprintf_chk_1 (gimple_location (stmt), nargs,
14168 (nargs > 0
14169 ? gimple_call_arg_ptr (stmt, 0)
14170 : &error_mark_node), maxlen, fcode);
14171 }
14172
14173 /* Builtins with folding operations that operate on "..." arguments
14174 need special handling; we need to store the arguments in a convenient
14175 data structure before attempting any folding. Fortunately there are
14176 only a few builtins that fall into this category. FNDECL is the
14177 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
14178 result of the function call is ignored. */
14179
14180 static tree
14181 gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
14182 bool ignore ATTRIBUTE_UNUSED)
14183 {
14184 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
14185 tree ret = NULL_TREE;
14186
14187 switch (fcode)
14188 {
14189 case BUILT_IN_SPRINTF_CHK:
14190 case BUILT_IN_VSPRINTF_CHK:
14191 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
14192 break;
14193
14194 case BUILT_IN_SNPRINTF_CHK:
14195 case BUILT_IN_VSNPRINTF_CHK:
14196 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
14197 break;

14198 default:
14199 break;
14200 }
14201 if (ret)
14202 {
14203 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
14204 TREE_NO_WARNING (ret) = 1;
14205 return ret;
14206 }
14207 return NULL_TREE;
14208 }
14209
14210 /* A wrapper function for builtin folding that prevents warnings for
14211 "statement without effect" and the like, caused by removing the
14212 call node before the warning is generated. */
14213
14214 tree
14215 fold_call_stmt (gimple stmt, bool ignore)
14216 {
14217 tree ret = NULL_TREE;
14218 tree fndecl = gimple_call_fndecl (stmt);
14219 location_t loc = gimple_location (stmt);
14220 if (fndecl
14221 && TREE_CODE (fndecl) == FUNCTION_DECL
14222 && DECL_BUILT_IN (fndecl)
14223 && !gimple_call_va_arg_pack_p (stmt))
14224 {
14225 int nargs = gimple_call_num_args (stmt);
14226 tree *args = (nargs > 0
14227 ? gimple_call_arg_ptr (stmt, 0)
14228 : &error_mark_node);
14229
14230 if (avoid_folding_inline_builtin (fndecl))
14231 return NULL_TREE;
14232 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
14233 {
14234 return targetm.fold_builtin (fndecl, nargs, args, ignore);
14235 }
14236 else
14237 {
14238 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
14239 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
14240 if (!ret)
14241 ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
14242 if (ret)
14243 {
14244 /* Propagate location information from original call to
14245 expansion of builtin. Otherwise things like
14246 maybe_emit_chk_warning, that operate on the expansion
14247 of a builtin, will use the wrong location information. */
14248 if (gimple_has_location (stmt))
14249 {
14250 tree realret = ret;
14251 if (TREE_CODE (ret) == NOP_EXPR)
14252 realret = TREE_OPERAND (ret, 0);
14253 if (CAN_HAVE_LOCATION_P (realret)
14254 && !EXPR_HAS_LOCATION (realret))
14255 SET_EXPR_LOCATION (realret, loc);
14256 return realret;
14257 }
14258 return ret;
14259 }
14260 }
14261 }
14262 return NULL_TREE;
14263 }
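
/* Example of why the location copy above matters: if "sqrt (4.0);"
   appears on some line L of the source, folding replaces the call
   with the constant 2.0; diagnostics emitted later against the
   folded tree (the comment above names maybe_emit_chk_warning) must
   still point at line L, hence SET_EXPR_LOCATION on the result.  */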
14264
14265 /* Look up the function in builtin_decl that corresponds to DECL
14266 and set ASMSPEC as its user assembler name. DECL must be a
14267 function decl that declares a builtin. */
14268
14269 void
14270 set_builtin_user_assembler_name (tree decl, const char *asmspec)
14271 {
14272 tree builtin;
14273 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
14274 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
14275 && asmspec != 0);
14276
14277 builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
14278 set_user_assembler_name (builtin, asmspec);
14279 switch (DECL_FUNCTION_CODE (decl))
14280 {
14281 case BUILT_IN_MEMCPY:
14282 init_block_move_fn (asmspec);
14283 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
14284 break;
14285 case BUILT_IN_MEMSET:
14286 init_block_clear_fn (asmspec);
14287 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
14288 break;
14289 case BUILT_IN_MEMMOVE:
14290 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
14291 break;
14292 case BUILT_IN_MEMCMP:
14293 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
14294 break;
14295 case BUILT_IN_ABORT:
14296 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
14297 break;
14298 case BUILT_IN_FFS:
14299 if (INT_TYPE_SIZE < BITS_PER_WORD)
14300 {
14301 set_user_assembler_libfunc ("ffs", asmspec);
14302 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
14303 MODE_INT, 0), "ffs");
14304 }
14305 break;
14306 default:
14307 break;
14308 }
14309 }
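
/* A user reaches this function by renaming a builtin at the assembler
   level, e.g.

     extern void *memcpy (void *, const void *, __SIZE_TYPE__)
       __asm__ ("my_memcpy");

   After this, even block moves that the compiler expands internally
   through memcpy_libfunc must call "my_memcpy", which is why the
   libfunc entries above are redirected and not just the decl's
   assembler name.  */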
14310
14311 /* Return true if DECL is a builtin that expands to a constant or similarly
14312 simple code. */
14313 bool
14314 is_simple_builtin (tree decl)
14315 {
14316 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
14317 switch (DECL_FUNCTION_CODE (decl))
14318 {
14319 /* Builtins that expand to constants. */
14320 case BUILT_IN_CONSTANT_P:
14321 case BUILT_IN_EXPECT:
14322 case BUILT_IN_OBJECT_SIZE:
14323 case BUILT_IN_UNREACHABLE:
14324 /* Simple register moves or loads from stack. */
14325 case BUILT_IN_ASSUME_ALIGNED:
14326 case BUILT_IN_RETURN_ADDRESS:
14327 case BUILT_IN_EXTRACT_RETURN_ADDR:
14328 case BUILT_IN_FROB_RETURN_ADDR:
14329 case BUILT_IN_RETURN:
14330 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
14331 case BUILT_IN_FRAME_ADDRESS:
14332 case BUILT_IN_VA_END:
14333 case BUILT_IN_STACK_SAVE:
14334 case BUILT_IN_STACK_RESTORE:
14335 /* Exception state returns or moves registers around. */
14336 case BUILT_IN_EH_FILTER:
14337 case BUILT_IN_EH_POINTER:
14338 case BUILT_IN_EH_COPY_VALUES:
14339 return true;
14340
14341 default:
14342 return false;
14343 }
14344
14345 return false;
14346 }
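
/* For example, __builtin_expect (x, 0) or __builtin_return_address (0)
   expands to at most a register move or a stack load, so inlining and
   cost heuristics may treat such calls as nearly free; a call to
   memcpy, by contrast, is deliberately absent from this list.  */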
14347
14348 /* Return true if DECL is a builtin that is not expensive, i.e., one that
14349 is most probably expanded inline into reasonably simple code. This is a
14350 superset of is_simple_builtin. */
14351 bool
14352 is_inexpensive_builtin (tree decl)
14353 {
14354 if (!decl)
14355 return false;
14356 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
14357 return true;
14358 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
14359 switch (DECL_FUNCTION_CODE (decl))
14360 {
14361 case BUILT_IN_ABS:
14362 case BUILT_IN_ALLOCA:
14363 case BUILT_IN_ALLOCA_WITH_ALIGN:
14364 case BUILT_IN_BSWAP16:
14365 case BUILT_IN_BSWAP32:
14366 case BUILT_IN_BSWAP64:
14367 case BUILT_IN_CLZ:
14368 case BUILT_IN_CLZIMAX:
14369 case BUILT_IN_CLZL:
14370 case BUILT_IN_CLZLL:
14371 case BUILT_IN_CTZ:
14372 case BUILT_IN_CTZIMAX:
14373 case BUILT_IN_CTZL:
14374 case BUILT_IN_CTZLL:
14375 case BUILT_IN_FFS:
14376 case BUILT_IN_FFSIMAX:
14377 case BUILT_IN_FFSL:
14378 case BUILT_IN_FFSLL:
14379 case BUILT_IN_IMAXABS:
14380 case BUILT_IN_FINITE:
14381 case BUILT_IN_FINITEF:
14382 case BUILT_IN_FINITEL:
14383 case BUILT_IN_FINITED32:
14384 case BUILT_IN_FINITED64:
14385 case BUILT_IN_FINITED128:
14386 case BUILT_IN_FPCLASSIFY:
14387 case BUILT_IN_ISFINITE:
14388 case BUILT_IN_ISINF_SIGN:
14389 case BUILT_IN_ISINF:
14390 case BUILT_IN_ISINFF:
14391 case BUILT_IN_ISINFL:
14392 case BUILT_IN_ISINFD32:
14393 case BUILT_IN_ISINFD64:
14394 case BUILT_IN_ISINFD128:
14395 case BUILT_IN_ISNAN:
14396 case BUILT_IN_ISNANF:
14397 case BUILT_IN_ISNANL:
14398 case BUILT_IN_ISNAND32:
14399 case BUILT_IN_ISNAND64:
14400 case BUILT_IN_ISNAND128:
14401 case BUILT_IN_ISNORMAL:
14402 case BUILT_IN_ISGREATER:
14403 case BUILT_IN_ISGREATEREQUAL:
14404 case BUILT_IN_ISLESS:
14405 case BUILT_IN_ISLESSEQUAL:
14406 case BUILT_IN_ISLESSGREATER:
14407 case BUILT_IN_ISUNORDERED:
14408 case BUILT_IN_VA_ARG_PACK:
14409 case BUILT_IN_VA_ARG_PACK_LEN:
14410 case BUILT_IN_VA_COPY:
14411 case BUILT_IN_TRAP:
14412 case BUILT_IN_SAVEREGS:
14413 case BUILT_IN_POPCOUNTL:
14414 case BUILT_IN_POPCOUNTLL:
14415 case BUILT_IN_POPCOUNTIMAX:
14416 case BUILT_IN_POPCOUNT:
14417 case BUILT_IN_PARITYL:
14418 case BUILT_IN_PARITYLL:
14419 case BUILT_IN_PARITYIMAX:
14420 case BUILT_IN_PARITY:
14421 case BUILT_IN_LABS:
14422 case BUILT_IN_LLABS:
14423 case BUILT_IN_PREFETCH:
14424 return true;
14425
14426 default:
14427 return is_simple_builtin (decl);
14428 }
14429
14430 return false;
14431 }