gcc/builtins.c
1 /* Expand builtin functions.
2 Copyright (C) 1988-2013 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "realmpfr.h"
28 #include "gimple.h"
29 #include "flags.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "except.h"
33 #include "function.h"
34 #include "insn-config.h"
35 #include "expr.h"
36 #include "optabs.h"
37 #include "libfuncs.h"
38 #include "recog.h"
39 #include "output.h"
40 #include "typeclass.h"
41 #include "predict.h"
42 #include "tm_p.h"
43 #include "target.h"
44 #include "langhooks.h"
45 #include "basic-block.h"
46 #include "tree-mudflap.h"
47 #include "tree-flow.h"
48 #include "value-prof.h"
49 #include "diagnostic-core.h"
50 #include "builtins.h"
51
52
53 #ifndef PAD_VARARGS_DOWN
54 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
55 #endif
56 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
57
58 struct target_builtins default_target_builtins;
59 #if SWITCHABLE_TARGET
60 struct target_builtins *this_target_builtins = &default_target_builtins;
61 #endif
62
63 /* Define the names of the builtin function types and codes. */
64 const char *const built_in_class_names[4]
65 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
66
67 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
68 const char * built_in_names[(int) END_BUILTINS] =
69 {
70 #include "builtins.def"
71 };
72 #undef DEF_BUILTIN
73
 74 /* Set up an array of _DECL trees; make sure each element is
 75    initialized to NULL_TREE.  */
76 builtin_info_type builtin_info;
77
78 /* Non-zero if __builtin_constant_p should be folded right away. */
79 bool force_folding_builtin_constant_p;
80
81 static const char *c_getstr (tree);
82 static rtx c_readstr (const char *, enum machine_mode);
83 static int target_char_cast (tree, char *);
84 static rtx get_memory_rtx (tree, tree);
85 static int apply_args_size (void);
86 static int apply_result_size (void);
87 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
88 static rtx result_vector (int, rtx);
89 #endif
90 static void expand_builtin_update_setjmp_buf (rtx);
91 static void expand_builtin_prefetch (tree);
92 static rtx expand_builtin_apply_args (void);
93 static rtx expand_builtin_apply_args_1 (void);
94 static rtx expand_builtin_apply (rtx, rtx, rtx);
95 static void expand_builtin_return (rtx);
96 static enum type_class type_to_class (tree);
97 static rtx expand_builtin_classify_type (tree);
98 static void expand_errno_check (tree, rtx);
99 static rtx expand_builtin_mathfn (tree, rtx, rtx);
100 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
101 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
102 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
103 static rtx expand_builtin_interclass_mathfn (tree, rtx);
104 static rtx expand_builtin_sincos (tree);
105 static rtx expand_builtin_cexpi (tree, rtx);
106 static rtx expand_builtin_int_roundingfn (tree, rtx);
107 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
108 static rtx expand_builtin_next_arg (void);
109 static rtx expand_builtin_va_start (tree);
110 static rtx expand_builtin_va_end (tree);
111 static rtx expand_builtin_va_copy (tree);
112 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
113 static rtx expand_builtin_strcmp (tree, rtx);
114 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
115 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
116 static rtx expand_builtin_memcpy (tree, rtx);
117 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
118 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
119 enum machine_mode, int);
120 static rtx expand_builtin_strcpy (tree, rtx);
121 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
122 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
123 static rtx expand_builtin_strncpy (tree, rtx);
124 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
125 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
126 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
127 static rtx expand_builtin_bzero (tree);
128 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
129 static rtx expand_builtin_alloca (tree, bool);
130 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
131 static rtx expand_builtin_frame_address (tree, tree);
132 static tree stabilize_va_list_loc (location_t, tree, int);
133 static rtx expand_builtin_expect (tree, rtx);
134 static tree fold_builtin_constant_p (tree);
135 static tree fold_builtin_expect (location_t, tree, tree);
136 static tree fold_builtin_classify_type (tree);
137 static tree fold_builtin_strlen (location_t, tree, tree);
138 static tree fold_builtin_inf (location_t, tree, int);
139 static tree fold_builtin_nan (tree, tree, int);
140 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
141 static bool validate_arg (const_tree, enum tree_code code);
142 static bool integer_valued_real_p (tree);
143 static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
144 static bool readonly_data_expr (tree);
145 static rtx expand_builtin_fabs (tree, rtx, rtx);
146 static rtx expand_builtin_signbit (tree, rtx);
147 static tree fold_builtin_sqrt (location_t, tree, tree);
148 static tree fold_builtin_cbrt (location_t, tree, tree);
149 static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
150 static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
151 static tree fold_builtin_cos (location_t, tree, tree, tree);
152 static tree fold_builtin_cosh (location_t, tree, tree, tree);
153 static tree fold_builtin_tan (tree, tree);
154 static tree fold_builtin_trunc (location_t, tree, tree);
155 static tree fold_builtin_floor (location_t, tree, tree);
156 static tree fold_builtin_ceil (location_t, tree, tree);
157 static tree fold_builtin_round (location_t, tree, tree);
158 static tree fold_builtin_int_roundingfn (location_t, tree, tree);
159 static tree fold_builtin_bitop (tree, tree);
160 static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree, bool, int);
161 static tree fold_builtin_strchr (location_t, tree, tree, tree);
162 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
163 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
164 static tree fold_builtin_strcmp (location_t, tree, tree);
165 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
166 static tree fold_builtin_signbit (location_t, tree, tree);
167 static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
168 static tree fold_builtin_isascii (location_t, tree);
169 static tree fold_builtin_toascii (location_t, tree);
170 static tree fold_builtin_isdigit (location_t, tree);
171 static tree fold_builtin_fabs (location_t, tree, tree);
172 static tree fold_builtin_abs (location_t, tree, tree);
173 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
174 enum tree_code);
175 static tree fold_builtin_n (location_t, tree, tree *, int, bool);
176 static tree fold_builtin_0 (location_t, tree, bool);
177 static tree fold_builtin_1 (location_t, tree, tree, bool);
178 static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
179 static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
180 static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
181 static tree fold_builtin_varargs (location_t, tree, tree, bool);
182
183 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
184 static tree fold_builtin_strstr (location_t, tree, tree, tree);
185 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
186 static tree fold_builtin_strcat (location_t, tree, tree);
187 static tree fold_builtin_strncat (location_t, tree, tree, tree);
188 static tree fold_builtin_strspn (location_t, tree, tree);
189 static tree fold_builtin_strcspn (location_t, tree, tree);
190 static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
191 static tree fold_builtin_snprintf (location_t, tree, tree, tree, tree, int);
192
193 static rtx expand_builtin_object_size (tree);
194 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
195 enum built_in_function);
196 static void maybe_emit_chk_warning (tree, enum built_in_function);
197 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
198 static void maybe_emit_free_warning (tree);
199 static tree fold_builtin_object_size (tree, tree);
200 static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
201 static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
202 static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
203 static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
204 static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
205 enum built_in_function);
206 static bool init_target_chars (void);
207
208 static unsigned HOST_WIDE_INT target_newline;
209 static unsigned HOST_WIDE_INT target_percent;
210 static unsigned HOST_WIDE_INT target_c;
211 static unsigned HOST_WIDE_INT target_s;
212 static char target_percent_c[3];
213 static char target_percent_s[3];
214 static char target_percent_s_newline[4];
215 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
216 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
217 static tree do_mpfr_arg2 (tree, tree, tree,
218 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
219 static tree do_mpfr_arg3 (tree, tree, tree, tree,
220 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
221 static tree do_mpfr_sincos (tree, tree, tree);
222 static tree do_mpfr_bessel_n (tree, tree, tree,
223 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
224 const REAL_VALUE_TYPE *, bool);
225 static tree do_mpfr_remquo (tree, tree, tree);
226 static tree do_mpfr_lgamma_r (tree, tree, tree);
227 static void expand_builtin_sync_synchronize (void);
228
 229 /* Return true if NAME starts with __builtin_, __sync_ or __atomic_.  */
230
231 static bool
232 is_builtin_name (const char *name)
233 {
234 if (strncmp (name, "__builtin_", 10) == 0)
235 return true;
236 if (strncmp (name, "__sync_", 7) == 0)
237 return true;
238 if (strncmp (name, "__atomic_", 9) == 0)
239 return true;
240 return false;
241 }
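
/* Editorial sketch, not part of the original file: how the prefix tests
   above classify a few identifiers.  The wrapper function is hypothetical
   and kept out of the build with #if 0.  */
#if 0
static void
example_is_builtin_name (void)
{
  gcc_assert (is_builtin_name ("__builtin_memcpy"));     /* "__builtin_" prefix.  */
  gcc_assert (is_builtin_name ("__sync_fetch_and_add")); /* "__sync_" prefix.  */
  gcc_assert (is_builtin_name ("__atomic_load_n"));      /* "__atomic_" prefix.  */
  gcc_assert (!is_builtin_name ("memcpy"));              /* No recognized prefix.  */
}
#endif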
242
243
244 /* Return true if DECL is a function symbol representing a built-in. */
245
246 bool
247 is_builtin_fn (tree decl)
248 {
249 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
250 }
251
252
253 /* Return true if NODE should be considered for inline expansion regardless
254 of the optimization level. This means whenever a function is invoked with
255 its "internal" name, which normally contains the prefix "__builtin". */
256
257 static bool
258 called_as_built_in (tree node)
259 {
260 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
261 we want the name used to call the function, not the name it
262 will have. */
263 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
264 return is_builtin_name (name);
265 }
266
 267 /* Compute values M and N such that M divides (address of EXP - N) and such
 268    that N < M.  If these numbers can be determined, store M in *ALIGNP and N in
 269    *BITPOSP and return true.  Otherwise return false and store BITS_PER_UNIT to
 270    *ALIGNP and any bit-offset to *BITPOSP.
271
272 Note that the address (and thus the alignment) computed here is based
273 on the address to which a symbol resolves, whereas DECL_ALIGN is based
274 on the address at which an object is actually located. These two
275 addresses are not always the same. For example, on ARM targets,
276 the address &foo of a Thumb function foo() has the lowest bit set,
277 whereas foo() itself starts on an even address.
278
279 If ADDR_P is true we are taking the address of the memory reference EXP
280 and thus cannot rely on the access taking place. */
281
282 static bool
283 get_object_alignment_2 (tree exp, unsigned int *alignp,
284 unsigned HOST_WIDE_INT *bitposp, bool addr_p)
285 {
286 HOST_WIDE_INT bitsize, bitpos;
287 tree offset;
288 enum machine_mode mode;
289 int unsignedp, volatilep;
290 unsigned int inner, align = BITS_PER_UNIT;
291 bool known_alignment = false;
292
293 /* Get the innermost object and the constant (bitpos) and possibly
294 variable (offset) offset of the access. */
295 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
296 &mode, &unsignedp, &volatilep, true);
297
298 /* Extract alignment information from the innermost object and
299 possibly adjust bitpos and offset. */
300 if (TREE_CODE (exp) == FUNCTION_DECL)
301 {
302 /* Function addresses can encode extra information besides their
303 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
304 allows the low bit to be used as a virtual bit, we know
305 that the address itself must be at least 2-byte aligned. */
306 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
307 align = 2 * BITS_PER_UNIT;
308 }
309 else if (TREE_CODE (exp) == LABEL_DECL)
310 ;
311 else if (TREE_CODE (exp) == CONST_DECL)
312 {
313 /* The alignment of a CONST_DECL is determined by its initializer. */
314 exp = DECL_INITIAL (exp);
315 align = TYPE_ALIGN (TREE_TYPE (exp));
316 #ifdef CONSTANT_ALIGNMENT
317 if (CONSTANT_CLASS_P (exp))
318 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
319 #endif
320 known_alignment = true;
321 }
322 else if (DECL_P (exp))
323 {
324 align = DECL_ALIGN (exp);
325 known_alignment = true;
326 }
327 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
328 {
329 align = TYPE_ALIGN (TREE_TYPE (exp));
330 }
331 else if (TREE_CODE (exp) == INDIRECT_REF
332 || TREE_CODE (exp) == MEM_REF
333 || TREE_CODE (exp) == TARGET_MEM_REF)
334 {
335 tree addr = TREE_OPERAND (exp, 0);
336 unsigned ptr_align;
337 unsigned HOST_WIDE_INT ptr_bitpos;
338
339 if (TREE_CODE (addr) == BIT_AND_EXPR
340 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
341 {
342 align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
343 & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
344 align *= BITS_PER_UNIT;
345 addr = TREE_OPERAND (addr, 0);
346 }
347
348 known_alignment
349 = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
350 align = MAX (ptr_align, align);
351
352 /* The alignment of the pointer operand in a TARGET_MEM_REF
353 has to take the variable offset parts into account. */
354 if (TREE_CODE (exp) == TARGET_MEM_REF)
355 {
356 if (TMR_INDEX (exp))
357 {
358 unsigned HOST_WIDE_INT step = 1;
359 if (TMR_STEP (exp))
360 step = TREE_INT_CST_LOW (TMR_STEP (exp));
361 align = MIN (align, (step & -step) * BITS_PER_UNIT);
362 }
363 if (TMR_INDEX2 (exp))
364 align = BITS_PER_UNIT;
365 known_alignment = false;
366 }
367
368 /* When EXP is an actual memory reference then we can use
369 TYPE_ALIGN of a pointer indirection to derive alignment.
370 Do so only if get_pointer_alignment_1 did not reveal absolute
371 alignment knowledge and if using that alignment would
372 improve the situation. */
373 if (!addr_p && !known_alignment
374 && TYPE_ALIGN (TREE_TYPE (exp)) > align)
375 align = TYPE_ALIGN (TREE_TYPE (exp));
376 else
377 {
378 /* Else adjust bitpos accordingly. */
379 bitpos += ptr_bitpos;
380 if (TREE_CODE (exp) == MEM_REF
381 || TREE_CODE (exp) == TARGET_MEM_REF)
382 bitpos += mem_ref_offset (exp).low * BITS_PER_UNIT;
383 }
384 }
385 else if (TREE_CODE (exp) == STRING_CST)
386 {
387 /* STRING_CST are the only constant objects we allow to be not
388 wrapped inside a CONST_DECL. */
389 align = TYPE_ALIGN (TREE_TYPE (exp));
390 #ifdef CONSTANT_ALIGNMENT
391 if (CONSTANT_CLASS_P (exp))
392 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
393 #endif
394 known_alignment = true;
395 }
396
 397 /* If there is a non-constant offset part, extract the maximum
398 alignment that can prevail. */
399 inner = ~0U;
400 while (offset)
401 {
402 tree next_offset;
403
404 if (TREE_CODE (offset) == PLUS_EXPR)
405 {
406 next_offset = TREE_OPERAND (offset, 0);
407 offset = TREE_OPERAND (offset, 1);
408 }
409 else
410 next_offset = NULL;
411 if (host_integerp (offset, 1))
412 {
413 /* Any overflow in calculating offset_bits won't change
414 the alignment. */
415 unsigned offset_bits
416 = ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);
417
418 if (offset_bits)
419 inner = MIN (inner, (offset_bits & -offset_bits));
420 }
421 else if (TREE_CODE (offset) == MULT_EXPR
422 && host_integerp (TREE_OPERAND (offset, 1), 1))
423 {
424 /* Any overflow in calculating offset_factor won't change
425 the alignment. */
426 unsigned offset_factor
427 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
428 * BITS_PER_UNIT);
429
430 if (offset_factor)
431 inner = MIN (inner, (offset_factor & -offset_factor));
432 }
433 else
434 {
435 inner = MIN (inner, BITS_PER_UNIT);
436 break;
437 }
438 offset = next_offset;
439 }
440 /* Alignment is innermost object alignment adjusted by the constant
441 and non-constant offset parts. */
442 align = MIN (align, inner);
443
444 *alignp = align;
445 *bitposp = bitpos & (*alignp - 1);
446 return known_alignment;
447 }
448
 449 /* For a memory reference expression EXP compute values M and N such that M
 450    divides (&EXP - N) and such that N < M.  If these numbers can be determined,
 451    store M in *ALIGNP and N in *BITPOSP and return true.  Otherwise return false
 452    and store BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.  */
453
454 bool
455 get_object_alignment_1 (tree exp, unsigned int *alignp,
456 unsigned HOST_WIDE_INT *bitposp)
457 {
458 return get_object_alignment_2 (exp, alignp, bitposp, false);
459 }
460
461 /* Return the alignment in bits of EXP, an object. */
462
463 unsigned int
464 get_object_alignment (tree exp)
465 {
466 unsigned HOST_WIDE_INT bitpos = 0;
467 unsigned int align;
468
469 get_object_alignment_1 (exp, &align, &bitpos);
470
471 /* align and bitpos now specify known low bits of the pointer.
472 ptr & (align - 1) == bitpos. */
473
474 if (bitpos != 0)
475 align = (bitpos & -bitpos);
476 return align;
477 }
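
/* Worked example (editorial, assuming BITS_PER_UNIT == 8): if the address
   of EXP is known to be 8 * k + 4 bytes for some k, get_object_alignment_1
   stores *alignp = 64 and *bitposp = 32 (both in bits).  Since bitpos is
   nonzero, the reduction above yields bitpos & -bitpos = 32, i.e. only
   4-byte alignment can be relied on.  */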
478
479 /* For a pointer valued expression EXP compute values M and N such that M
480 divides (EXP - N) and such that N < M. If these numbers can be determined,
 481    store M in *ALIGNP and N in *BITPOSP and return true.  Return false if
482 the results are just a conservative approximation.
483
484 If EXP is not a pointer, false is returned too. */
485
486 bool
487 get_pointer_alignment_1 (tree exp, unsigned int *alignp,
488 unsigned HOST_WIDE_INT *bitposp)
489 {
490 STRIP_NOPS (exp);
491
492 if (TREE_CODE (exp) == ADDR_EXPR)
493 return get_object_alignment_2 (TREE_OPERAND (exp, 0),
494 alignp, bitposp, true);
495 else if (TREE_CODE (exp) == SSA_NAME
496 && POINTER_TYPE_P (TREE_TYPE (exp)))
497 {
498 unsigned int ptr_align, ptr_misalign;
499 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
500
501 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
502 {
503 *bitposp = ptr_misalign * BITS_PER_UNIT;
504 *alignp = ptr_align * BITS_PER_UNIT;
505 /* We cannot really tell whether this result is an approximation. */
506 return true;
507 }
508 else
509 {
510 *bitposp = 0;
511 *alignp = BITS_PER_UNIT;
512 return false;
513 }
514 }
515 else if (TREE_CODE (exp) == INTEGER_CST)
516 {
517 *alignp = BIGGEST_ALIGNMENT;
518 *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
519 & (BIGGEST_ALIGNMENT - 1));
520 return true;
521 }
522
523 *bitposp = 0;
524 *alignp = BITS_PER_UNIT;
525 return false;
526 }
527
528 /* Return the alignment in bits of EXP, a pointer valued expression.
529 The alignment returned is, by default, the alignment of the thing that
 530    EXP points to.  If it is not a POINTER_TYPE, BITS_PER_UNIT is returned.
531
532 Otherwise, look at the expression to see if we can do better, i.e., if the
533 expression is actually pointing at an object whose alignment is tighter. */
534
535 unsigned int
536 get_pointer_alignment (tree exp)
537 {
538 unsigned HOST_WIDE_INT bitpos = 0;
539 unsigned int align;
540
541 get_pointer_alignment_1 (exp, &align, &bitpos);
542
543 /* align and bitpos now specify known low bits of the pointer.
544 ptr & (align - 1) == bitpos. */
545
546 if (bitpos != 0)
547 align = (bitpos & -bitpos);
548
549 return align;
550 }
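
/* Worked example (editorial): for an SSA pointer whose ptr_info records a
   byte alignment of 16 with a misalignment of 4, get_pointer_alignment_1
   stores *alignp = 128 and *bitposp = 32 (bits), and the reduction above
   returns 32, i.e. 4-byte alignment.  */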
551
552 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
 553    way, because the string could contain a zero byte in the middle.
554 TREE_STRING_LENGTH is the size of the character array, not the string.
555
556 ONLY_VALUE should be nonzero if the result is not going to be emitted
557 into the instruction stream and zero if it is going to be expanded.
558 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
559 is returned, otherwise NULL, since
560 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
561 evaluate the side-effects.
562
563 The value returned is of type `ssizetype'.
564
565 Unfortunately, string_constant can't access the values of const char
566 arrays with initializers, so neither can we do so here. */
567
568 tree
569 c_strlen (tree src, int only_value)
570 {
571 tree offset_node;
572 HOST_WIDE_INT offset;
573 int max;
574 const char *ptr;
575 location_t loc;
576
577 STRIP_NOPS (src);
578 if (TREE_CODE (src) == COND_EXPR
579 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
580 {
581 tree len1, len2;
582
583 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
584 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
585 if (tree_int_cst_equal (len1, len2))
586 return len1;
587 }
588
589 if (TREE_CODE (src) == COMPOUND_EXPR
590 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
591 return c_strlen (TREE_OPERAND (src, 1), only_value);
592
593 loc = EXPR_LOC_OR_HERE (src);
594
595 src = string_constant (src, &offset_node);
596 if (src == 0)
597 return NULL_TREE;
598
599 max = TREE_STRING_LENGTH (src) - 1;
600 ptr = TREE_STRING_POINTER (src);
601
602 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
603 {
604 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
605 compute the offset to the following null if we don't know where to
606 start searching for it. */
607 int i;
608
609 for (i = 0; i < max; i++)
610 if (ptr[i] == 0)
611 return NULL_TREE;
612
613 /* We don't know the starting offset, but we do know that the string
614 has no internal zero bytes. We can assume that the offset falls
615 within the bounds of the string; otherwise, the programmer deserves
616 what he gets. Subtract the offset from the length of the string,
617 and return that. This would perhaps not be valid if we were dealing
618 with named arrays in addition to literal string constants. */
619
620 return size_diffop_loc (loc, size_int (max), offset_node);
621 }
622
623 /* We have a known offset into the string. Start searching there for
624 a null character if we can represent it as a single HOST_WIDE_INT. */
625 if (offset_node == 0)
626 offset = 0;
627 else if (! host_integerp (offset_node, 0))
628 offset = -1;
629 else
630 offset = tree_low_cst (offset_node, 0);
631
632 /* If the offset is known to be out of bounds, warn, and call strlen at
633 runtime. */
634 if (offset < 0 || offset > max)
635 {
636 /* Suppress multiple warnings for propagated constant strings. */
637 if (! TREE_NO_WARNING (src))
638 {
639 warning_at (loc, 0, "offset outside bounds of constant string");
640 TREE_NO_WARNING (src) = 1;
641 }
642 return NULL_TREE;
643 }
644
645 /* Use strlen to search for the first zero byte. Since any strings
646 constructed with build_string will have nulls appended, we win even
647 if we get handed something like (char[4])"abcd".
648
649 Since OFFSET is our starting index into the string, no further
650 calculation is needed. */
651 return ssize_int (strlen (ptr + offset));
652 }
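
/* Illustrative examples (editorial):
     c_strlen (&"hello"[2], 0) -> ssize_int (3): a known offset of 2 into
       a 6-byte array whose first NUL is at index 5;
     c_strlen ("foo\0bar" + i, 0), with I non-constant -> NULL_TREE,
       because the internal zero byte makes the length depend on I.  */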
653
654 /* Return a char pointer for a C string if it is a string constant
655 or sum of string constant and integer constant. */
656
657 static const char *
658 c_getstr (tree src)
659 {
660 tree offset_node;
661
662 src = string_constant (src, &offset_node);
663 if (src == 0)
664 return 0;
665
666 if (offset_node == 0)
667 return TREE_STRING_POINTER (src);
668 else if (!host_integerp (offset_node, 1)
669 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
670 return 0;
671
672 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
673 }
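
/* Illustrative examples (editorial):
     c_getstr (&"hello"[1]) -> pointer to "ello";
     c_getstr with an offset beyond TREE_STRING_LENGTH - 1 -> 0.  */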
674
675 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
676 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
677
678 static rtx
679 c_readstr (const char *str, enum machine_mode mode)
680 {
681 HOST_WIDE_INT c[2];
682 HOST_WIDE_INT ch;
683 unsigned int i, j;
684
685 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
686
687 c[0] = 0;
688 c[1] = 0;
689 ch = 1;
690 for (i = 0; i < GET_MODE_SIZE (mode); i++)
691 {
692 j = i;
693 if (WORDS_BIG_ENDIAN)
694 j = GET_MODE_SIZE (mode) - i - 1;
695 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
696 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
697 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
698 j *= BITS_PER_UNIT;
699 gcc_assert (j < HOST_BITS_PER_DOUBLE_INT);
700
701 if (ch)
702 ch = (unsigned char) str[i];
703 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
704 }
705 return immed_double_const (c[0], c[1], mode);
706 }
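
/* Worked example (editorial, assuming a little-endian target with 8-bit
   units): reading "ab" in SImode visits 'a' (0x61) and 'b' (0x62), then
   hits the implicit trailing NUL, which forces CH to 0 for the remaining
   bytes.  The result is the constant 0x00006261.  */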
707
 708 /* Cast a target constant CST to a target CHAR; if that value fits into the
 709    host char type, return zero and store the value in the variable pointed to
 710    by P.  Otherwise return 1.  */
711
712 static int
713 target_char_cast (tree cst, char *p)
714 {
715 unsigned HOST_WIDE_INT val, hostval;
716
717 if (TREE_CODE (cst) != INTEGER_CST
718 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
719 return 1;
720
721 val = TREE_INT_CST_LOW (cst);
722 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
723 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
724
725 hostval = val;
726 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
727 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
728
729 if (val != hostval)
730 return 1;
731
732 *p = hostval;
733 return 0;
734 }
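
/* Worked example (editorial, assuming CHAR_TYPE_SIZE == 8 and
   HOST_BITS_PER_CHAR == 8): for CST == 0x141, VAL is masked to 0x41,
   HOSTVAL equals VAL, so *P is set to 0x41 ('A') and 0 is returned.
   With a 16-bit target char but an 8-bit host char, CST == 0x1234 gives
   VAL (0x1234) != HOSTVAL (0x34), so 1 is returned.  */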
735
736 /* Similar to save_expr, but assumes that arbitrary code is not executed
737 in between the multiple evaluations. In particular, we assume that a
738 non-addressable local variable will not be modified. */
739
740 static tree
741 builtin_save_expr (tree exp)
742 {
743 if (TREE_CODE (exp) == SSA_NAME
744 || (TREE_ADDRESSABLE (exp) == 0
745 && (TREE_CODE (exp) == PARM_DECL
746 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
747 return exp;
748
749 return save_expr (exp);
750 }
751
752 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
753 times to get the address of either a higher stack frame, or a return
754 address located within it (depending on FNDECL_CODE). */
755
756 static rtx
757 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
758 {
759 int i;
760
761 #ifdef INITIAL_FRAME_ADDRESS_RTX
762 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
763 #else
764 rtx tem;
765
766 /* For a zero count with __builtin_return_address, we don't care what
767 frame address we return, because target-specific definitions will
768 override us. Therefore frame pointer elimination is OK, and using
769 the soft frame pointer is OK.
770
771 For a nonzero count, or a zero count with __builtin_frame_address,
772 we require a stable offset from the current frame pointer to the
773 previous one, so we must use the hard frame pointer, and
774 we must disable frame pointer elimination. */
775 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
776 tem = frame_pointer_rtx;
777 else
778 {
779 tem = hard_frame_pointer_rtx;
780
781 /* Tell reload not to eliminate the frame pointer. */
782 crtl->accesses_prior_frames = 1;
783 }
784 #endif
785
786 /* Some machines need special handling before we can access
787 arbitrary frames. For example, on the SPARC, we must first flush
788 all register windows to the stack. */
789 #ifdef SETUP_FRAME_ADDRESSES
790 if (count > 0)
791 SETUP_FRAME_ADDRESSES ();
792 #endif
793
794 /* On the SPARC, the return address is not in the frame, it is in a
795 register. There is no way to access it off of the current frame
796 pointer, but it can be accessed off the previous frame pointer by
797 reading the value from the register window save area. */
798 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
799 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
800 count--;
801 #endif
802
803 /* Scan back COUNT frames to the specified frame. */
804 for (i = 0; i < count; i++)
805 {
806 /* Assume the dynamic chain pointer is in the word that the
807 frame address points to, unless otherwise specified. */
808 #ifdef DYNAMIC_CHAIN_ADDRESS
809 tem = DYNAMIC_CHAIN_ADDRESS (tem);
810 #endif
811 tem = memory_address (Pmode, tem);
812 tem = gen_frame_mem (Pmode, tem);
813 tem = copy_to_reg (tem);
814 }
815
816 /* For __builtin_frame_address, return what we've got. But, on
817 the SPARC for example, we may have to add a bias. */
818 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
819 #ifdef FRAME_ADDR_RTX
820 return FRAME_ADDR_RTX (tem);
821 #else
822 return tem;
823 #endif
824
825 /* For __builtin_return_address, get the return address from that frame. */
826 #ifdef RETURN_ADDR_RTX
827 tem = RETURN_ADDR_RTX (count, tem);
828 #else
829 tem = memory_address (Pmode,
830 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
831 tem = gen_frame_mem (Pmode, tem);
832 #endif
833 return tem;
834 }
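
/* Editorial example of the builtin this function expands; COUNT == 0
   requests the return address of the current frame.  Kept out of the
   build with #if 0.  */
#if 0
void *
example_caller_address (void)
{
  return __builtin_return_address (0);
}
#endif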
835
836 /* Alias set used for setjmp buffer. */
837 static alias_set_type setjmp_alias_set = -1;
838
839 /* Construct the leading half of a __builtin_setjmp call. Control will
840 return to RECEIVER_LABEL. This is also called directly by the SJLJ
841 exception handling code. */
842
843 void
844 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
845 {
846 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
847 rtx stack_save;
848 rtx mem;
849
850 if (setjmp_alias_set == -1)
851 setjmp_alias_set = new_alias_set ();
852
853 buf_addr = convert_memory_address (Pmode, buf_addr);
854
855 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
856
857 /* We store the frame pointer and the address of receiver_label in
858 the buffer and use the rest of it for the stack save area, which
859 is machine-dependent. */
860
861 mem = gen_rtx_MEM (Pmode, buf_addr);
862 set_mem_alias_set (mem, setjmp_alias_set);
863 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
864
865 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
 866 					   GET_MODE_SIZE (Pmode)));
867 set_mem_alias_set (mem, setjmp_alias_set);
868
869 emit_move_insn (validize_mem (mem),
870 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
871
872 stack_save = gen_rtx_MEM (sa_mode,
873 plus_constant (Pmode, buf_addr,
874 2 * GET_MODE_SIZE (Pmode)));
875 set_mem_alias_set (stack_save, setjmp_alias_set);
876 emit_stack_save (SAVE_NONLOCAL, &stack_save);
877
878 /* If there is further processing to do, do it. */
879 #ifdef HAVE_builtin_setjmp_setup
880 if (HAVE_builtin_setjmp_setup)
881 emit_insn (gen_builtin_setjmp_setup (buf_addr));
882 #endif
883
884 /* We have a nonlocal label. */
885 cfun->has_nonlocal_label = 1;
886 }
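
/* Editorial sketch of the buffer layout established above, in Pmode-sized
   words (everything past word 2 is machine-dependent):

     buf[0]   frame value (targetm.builtin_setjmp_frame_value)
     buf[1]   address of RECEIVER_LABEL
     buf[2..] stack save area filled by emit_stack_save  */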
887
888 /* Construct the trailing part of a __builtin_setjmp call. This is
889 also called directly by the SJLJ exception handling code.
 890    If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */
891
892 void
893 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
894 {
895 rtx chain;
896
897 /* Mark the FP as used when we get here, so we have to make sure it's
898 marked as used by this function. */
899 emit_use (hard_frame_pointer_rtx);
900
901 /* Mark the static chain as clobbered here so life information
902 doesn't get messed up for it. */
903 chain = targetm.calls.static_chain (current_function_decl, true);
904 if (chain && REG_P (chain))
905 emit_clobber (chain);
906
907 /* Now put in the code to restore the frame pointer, and argument
908 pointer, if needed. */
909 #ifdef HAVE_nonlocal_goto
910 if (! HAVE_nonlocal_goto)
911 #endif
912 /* First adjust our frame pointer to its actual value. It was
913 previously set to the start of the virtual area corresponding to
914 the stacked variables when we branched here and now needs to be
915 adjusted to the actual hardware fp value.
916
917 Assignments to virtual registers are converted by
918 instantiate_virtual_regs into the corresponding assignment
919 to the underlying register (fp in this case) that makes
920 the original assignment true.
921 So the following insn will actually be decrementing fp by
922 STARTING_FRAME_OFFSET. */
923 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
924
925 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
926 if (fixed_regs[ARG_POINTER_REGNUM])
927 {
928 #ifdef ELIMINABLE_REGS
929 /* If the argument pointer can be eliminated in favor of the
930 frame pointer, we don't need to restore it. We assume here
931 that if such an elimination is present, it can always be used.
932 This is the case on all known machines; if we don't make this
933 assumption, we do unnecessary saving on many machines. */
934 size_t i;
935 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
936
937 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
938 if (elim_regs[i].from == ARG_POINTER_REGNUM
939 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
940 break;
941
942 if (i == ARRAY_SIZE (elim_regs))
943 #endif
944 {
945 /* Now restore our arg pointer from the address at which it
946 was saved in our stack frame. */
947 emit_move_insn (crtl->args.internal_arg_pointer,
948 copy_to_reg (get_arg_pointer_save_area ()));
949 }
950 }
951 #endif
952
953 #ifdef HAVE_builtin_setjmp_receiver
954 if (receiver_label != NULL && HAVE_builtin_setjmp_receiver)
955 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
956 else
957 #endif
958 #ifdef HAVE_nonlocal_goto_receiver
959 if (HAVE_nonlocal_goto_receiver)
960 emit_insn (gen_nonlocal_goto_receiver ());
961 else
962 #endif
963 { /* Nothing */ }
964
965 /* We must not allow the code we just generated to be reordered by
966 scheduling. Specifically, the update of the frame pointer must
967 happen immediately, not later. Similarly, we must block
968 (frame-related) register values to be used across this code. */
969 emit_insn (gen_blockage ());
970 }
971
972 /* __builtin_longjmp is passed a pointer to an array of five words (not
973 all will be used on all machines). It operates similarly to the C
974 library function of the same name, but is more efficient. Much of
975 the code below is copied from the handling of non-local gotos. */
976
977 static void
978 expand_builtin_longjmp (rtx buf_addr, rtx value)
979 {
980 rtx fp, lab, stack, insn, last;
981 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
982
 983 /* DRAP is needed for stack realignment if longjmp is expanded in the
 984    current function.  */
985 if (SUPPORTS_STACK_ALIGNMENT)
986 crtl->need_drap = true;
987
988 if (setjmp_alias_set == -1)
989 setjmp_alias_set = new_alias_set ();
990
991 buf_addr = convert_memory_address (Pmode, buf_addr);
992
993 buf_addr = force_reg (Pmode, buf_addr);
994
 995 /* We require the user to pass a second argument of 1, because that is
 996    what builtin_setjmp will return.  */
997 gcc_assert (value == const1_rtx);
998
999 last = get_last_insn ();
1000 #ifdef HAVE_builtin_longjmp
1001 if (HAVE_builtin_longjmp)
1002 emit_insn (gen_builtin_longjmp (buf_addr));
1003 else
1004 #endif
1005 {
1006 fp = gen_rtx_MEM (Pmode, buf_addr);
1007 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
1008 GET_MODE_SIZE (Pmode)));
1009
1010 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
1011 2 * GET_MODE_SIZE (Pmode)));
1012 set_mem_alias_set (fp, setjmp_alias_set);
1013 set_mem_alias_set (lab, setjmp_alias_set);
1014 set_mem_alias_set (stack, setjmp_alias_set);
1015
1016 /* Pick up FP, label, and SP from the block and jump. This code is
1017 from expand_goto in stmt.c; see there for detailed comments. */
1018 #ifdef HAVE_nonlocal_goto
1019 if (HAVE_nonlocal_goto)
1020 /* We have to pass a value to the nonlocal_goto pattern that will
1021 get copied into the static_chain pointer, but it does not matter
1022 what that value is, because builtin_setjmp does not use it. */
1023 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
1024 else
1025 #endif
1026 {
1027 lab = copy_to_reg (lab);
1028
1029 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1030 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1031
1032 emit_move_insn (hard_frame_pointer_rtx, fp);
1033 emit_stack_restore (SAVE_NONLOCAL, stack);
1034
1035 emit_use (hard_frame_pointer_rtx);
1036 emit_use (stack_pointer_rtx);
1037 emit_indirect_jump (lab);
1038 }
1039 }
1040
1041 /* Search backwards and mark the jump insn as a non-local goto.
1042 Note that this precludes the use of __builtin_longjmp to a
1043 __builtin_setjmp target in the same function. However, we've
1044 already cautioned the user that these functions are for
1045 internal exception handling use only. */
1046 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1047 {
1048 gcc_assert (insn != last);
1049
1050 if (JUMP_P (insn))
1051 {
1052 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1053 break;
1054 }
1055 else if (CALL_P (insn))
1056 break;
1057 }
1058 }
1059
1060 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1061 and the address of the save area. */
1062
1063 static rtx
1064 expand_builtin_nonlocal_goto (tree exp)
1065 {
1066 tree t_label, t_save_area;
1067 rtx r_label, r_save_area, r_fp, r_sp, insn;
1068
1069 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1070 return NULL_RTX;
1071
1072 t_label = CALL_EXPR_ARG (exp, 0);
1073 t_save_area = CALL_EXPR_ARG (exp, 1);
1074
1075 r_label = expand_normal (t_label);
1076 r_label = convert_memory_address (Pmode, r_label);
1077 r_save_area = expand_normal (t_save_area);
1078 r_save_area = convert_memory_address (Pmode, r_save_area);
1079 /* Copy the address of the save location to a register just in case it was
1080 based on the frame pointer. */
1081 r_save_area = copy_to_reg (r_save_area);
1082 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1083 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1084 plus_constant (Pmode, r_save_area,
1085 GET_MODE_SIZE (Pmode)));
1086
1087 crtl->has_nonlocal_goto = 1;
1088
1089 #ifdef HAVE_nonlocal_goto
1090 /* ??? We no longer need to pass the static chain value, afaik. */
1091 if (HAVE_nonlocal_goto)
1092 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1093 else
1094 #endif
1095 {
1096 r_label = copy_to_reg (r_label);
1097
1098 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1099 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1100
1101 /* Restore frame pointer for containing function. */
1102 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1103 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1104
1105 /* USE of hard_frame_pointer_rtx added for consistency;
1106 not clear if really needed. */
1107 emit_use (hard_frame_pointer_rtx);
1108 emit_use (stack_pointer_rtx);
1109
1110 /* If the architecture is using a GP register, we must
1111 conservatively assume that the target function makes use of it.
1112 The prologue of functions with nonlocal gotos must therefore
1113 initialize the GP register to the appropriate value, and we
1114 must then make sure that this value is live at the point
1115 of the jump. (Note that this doesn't necessarily apply
1116 to targets with a nonlocal_goto pattern; they are free
1117 to implement it in their own way. Note also that this is
1118 a no-op if the GP register is a global invariant.) */
1119 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
1120 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
1121 emit_use (pic_offset_table_rtx);
1122
1123 emit_indirect_jump (r_label);
1124 }
1125
1126 /* Search backwards to the jump insn and mark it as a
1127 non-local goto. */
1128 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1129 {
1130 if (JUMP_P (insn))
1131 {
1132 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1133 break;
1134 }
1135 else if (CALL_P (insn))
1136 break;
1137 }
1138
1139 return const0_rtx;
1140 }
1141
1142 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1143 (not all will be used on all machines) that was passed to __builtin_setjmp.
1144 It updates the stack pointer in that block to correspond to the current
1145 stack pointer. */
1146
1147 static void
1148 expand_builtin_update_setjmp_buf (rtx buf_addr)
1149 {
1150 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1151 rtx stack_save
1152 = gen_rtx_MEM (sa_mode,
1153 memory_address
1154 (sa_mode,
1155 plus_constant (Pmode, buf_addr,
1156 2 * GET_MODE_SIZE (Pmode))));
1157
1158 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1159 }
1160
1161 /* Expand a call to __builtin_prefetch. For a target that does not support
1162 data prefetch, evaluate the memory address argument in case it has side
1163 effects. */
1164
1165 static void
1166 expand_builtin_prefetch (tree exp)
1167 {
1168 tree arg0, arg1, arg2;
1169 int nargs;
1170 rtx op0, op1, op2;
1171
1172 if (!validate_arglist (exp, POINTER_TYPE, 0))
1173 return;
1174
1175 arg0 = CALL_EXPR_ARG (exp, 0);
1176
1177 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1178 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1179 locality). */
1180 nargs = call_expr_nargs (exp);
1181 if (nargs > 1)
1182 arg1 = CALL_EXPR_ARG (exp, 1);
1183 else
1184 arg1 = integer_zero_node;
1185 if (nargs > 2)
1186 arg2 = CALL_EXPR_ARG (exp, 2);
1187 else
1188 arg2 = integer_three_node;
1189
1190 /* Argument 0 is an address. */
1191 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1192
1193 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1194 if (TREE_CODE (arg1) != INTEGER_CST)
1195 {
1196 error ("second argument to %<__builtin_prefetch%> must be a constant");
1197 arg1 = integer_zero_node;
1198 }
1199 op1 = expand_normal (arg1);
1200 /* Argument 1 must be either zero or one. */
1201 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1202 {
1203 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1204 " using zero");
1205 op1 = const0_rtx;
1206 }
1207
1208 /* Argument 2 (locality) must be a compile-time constant int. */
1209 if (TREE_CODE (arg2) != INTEGER_CST)
1210 {
1211 error ("third argument to %<__builtin_prefetch%> must be a constant");
1212 arg2 = integer_zero_node;
1213 }
1214 op2 = expand_normal (arg2);
1215 /* Argument 2 must be 0, 1, 2, or 3. */
1216 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1217 {
1218 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1219 op2 = const0_rtx;
1220 }
1221
1222 #ifdef HAVE_prefetch
1223 if (HAVE_prefetch)
1224 {
1225 struct expand_operand ops[3];
1226
1227 create_address_operand (&ops[0], op0);
1228 create_integer_operand (&ops[1], INTVAL (op1));
1229 create_integer_operand (&ops[2], INTVAL (op2));
1230 if (maybe_expand_insn (CODE_FOR_prefetch, 3, ops))
1231 return;
1232 }
1233 #endif
1234
1235 /* Don't do anything with direct references to volatile memory, but
1236 generate code to handle other side effects. */
1237 if (!MEM_P (op0) && side_effects_p (op0))
1238 emit_insn (op0);
1239 }
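
/* Editorial examples of calls this function expands; the second and third
   arguments must be integer constants, per the checks above.  Kept out of
   the build with #if 0.  */
#if 0
void
example_prefetch (const char *p)
{
  __builtin_prefetch (p);	/* rw defaults to 0 (read), locality to 3.  */
  __builtin_prefetch (p, 1, 1);	/* Prefetch for write, low locality.  */
}
#endif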
1240
1241 /* Get a MEM rtx for expression EXP which is the address of an operand
1242 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1243 the maximum length of the block of memory that might be accessed or
1244 NULL if unknown. */
1245
1246 static rtx
1247 get_memory_rtx (tree exp, tree len)
1248 {
1249 tree orig_exp = exp;
1250 rtx addr, mem;
1251
 1252 /* When EXP is an unresolved SAVE_EXPR, MEM_ATTRS can still be derived
 1253    from its expression; for expr->a.b only <variable>.a.b is recorded.  */
1254 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1255 exp = TREE_OPERAND (exp, 0);
1256
1257 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1258 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1259
1260 /* Get an expression we can use to find the attributes to assign to MEM.
1261 First remove any nops. */
1262 while (CONVERT_EXPR_P (exp)
1263 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1264 exp = TREE_OPERAND (exp, 0);
1265
 1266 /* Build a MEM_REF representing the whole accessed area as a byte blob
 1267    (as builtin stringops may alias with anything).  */
1268 exp = fold_build2 (MEM_REF,
1269 build_array_type (char_type_node,
1270 build_range_type (sizetype,
1271 size_one_node, len)),
1272 exp, build_int_cst (ptr_type_node, 0));
1273
1274 /* If the MEM_REF has no acceptable address, try to get the base object
1275 from the original address we got, and build an all-aliasing
1276 unknown-sized access to that one. */
1277 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1278 set_mem_attributes (mem, exp, 0);
1279 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1280 && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1281 0))))
1282 {
1283 exp = build_fold_addr_expr (exp);
1284 exp = fold_build2 (MEM_REF,
1285 build_array_type (char_type_node,
1286 build_range_type (sizetype,
1287 size_zero_node,
1288 NULL)),
1289 exp, build_int_cst (ptr_type_node, 0));
1290 set_mem_attributes (mem, exp, 0);
1291 }
1292 set_mem_alias_set (mem, 0);
1293 return mem;
1294 }
1295 \f
1296 /* Built-in functions to perform an untyped call and return. */
1297
1298 #define apply_args_mode \
1299 (this_target_builtins->x_apply_args_mode)
1300 #define apply_result_mode \
1301 (this_target_builtins->x_apply_result_mode)
1302
1303 /* Return the size required for the block returned by __builtin_apply_args,
1304 and initialize apply_args_mode. */
1305
1306 static int
1307 apply_args_size (void)
1308 {
1309 static int size = -1;
1310 int align;
1311 unsigned int regno;
1312 enum machine_mode mode;
1313
1314 /* The values computed by this function never change. */
1315 if (size < 0)
1316 {
1317 /* The first value is the incoming arg-pointer. */
1318 size = GET_MODE_SIZE (Pmode);
1319
1320 /* The second value is the structure value address unless this is
1321 passed as an "invisible" first argument. */
1322 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1323 size += GET_MODE_SIZE (Pmode);
1324
1325 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1326 if (FUNCTION_ARG_REGNO_P (regno))
1327 {
1328 mode = targetm.calls.get_raw_arg_mode (regno);
1329
1330 gcc_assert (mode != VOIDmode);
1331
1332 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1333 if (size % align != 0)
1334 size = CEIL (size, align) * align;
1335 size += GET_MODE_SIZE (mode);
1336 apply_args_mode[regno] = mode;
1337 }
1338 else
1339 {
1340 apply_args_mode[regno] = VOIDmode;
1341 }
1342 }
1343 return size;
1344 }
1345
1346 /* Return the size required for the block returned by __builtin_apply,
1347 and initialize apply_result_mode. */
1348
1349 static int
1350 apply_result_size (void)
1351 {
1352 static int size = -1;
1353 int align, regno;
1354 enum machine_mode mode;
1355
1356 /* The values computed by this function never change. */
1357 if (size < 0)
1358 {
1359 size = 0;
1360
1361 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1362 if (targetm.calls.function_value_regno_p (regno))
1363 {
1364 mode = targetm.calls.get_raw_result_mode (regno);
1365
1366 gcc_assert (mode != VOIDmode);
1367
1368 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1369 if (size % align != 0)
1370 size = CEIL (size, align) * align;
1371 size += GET_MODE_SIZE (mode);
1372 apply_result_mode[regno] = mode;
1373 }
1374 else
1375 apply_result_mode[regno] = VOIDmode;
1376
1377 /* Allow targets that use untyped_call and untyped_return to override
1378 the size so that machine-specific information can be stored here. */
1379 #ifdef APPLY_RESULT_SIZE
1380 size = APPLY_RESULT_SIZE;
1381 #endif
1382 }
1383 return size;
1384 }
1385
1386 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1387 /* Create a vector describing the result block RESULT. If SAVEP is true,
1388 the result block is used to save the values; otherwise it is used to
1389 restore the values. */
1390
1391 static rtx
1392 result_vector (int savep, rtx result)
1393 {
1394 int regno, size, align, nelts;
1395 enum machine_mode mode;
1396 rtx reg, mem;
1397 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1398
1399 size = nelts = 0;
1400 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1401 if ((mode = apply_result_mode[regno]) != VOIDmode)
1402 {
1403 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1404 if (size % align != 0)
1405 size = CEIL (size, align) * align;
1406 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1407 mem = adjust_address (result, mode, size);
1408 savevec[nelts++] = (savep
1409 ? gen_rtx_SET (VOIDmode, mem, reg)
1410 : gen_rtx_SET (VOIDmode, reg, mem));
1411 size += GET_MODE_SIZE (mode);
1412 }
1413 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1414 }
1415 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1416
1417 /* Save the state required to perform an untyped call with the same
1418 arguments as were passed to the current function. */
1419
1420 static rtx
1421 expand_builtin_apply_args_1 (void)
1422 {
1423 rtx registers, tem;
1424 int size, align, regno;
1425 enum machine_mode mode;
1426 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1427
1428 /* Create a block where the arg-pointer, structure value address,
1429 and argument registers can be saved. */
1430 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1431
1432 /* Walk past the arg-pointer and structure value address. */
1433 size = GET_MODE_SIZE (Pmode);
1434 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1435 size += GET_MODE_SIZE (Pmode);
1436
1437 /* Save each register used in calling a function to the block. */
1438 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1439 if ((mode = apply_args_mode[regno]) != VOIDmode)
1440 {
1441 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1442 if (size % align != 0)
1443 size = CEIL (size, align) * align;
1444
1445 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1446
1447 emit_move_insn (adjust_address (registers, mode, size), tem);
1448 size += GET_MODE_SIZE (mode);
1449 }
1450
1451 /* Save the arg pointer to the block. */
1452 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1453 #ifdef STACK_GROWS_DOWNWARD
 1454 /* We need the pointer as the caller actually passed the arguments to us, not
1455 as we might have pretended they were passed. Make sure it's a valid
1456 operand, as emit_move_insn isn't expected to handle a PLUS. */
1457 tem
1458 = force_operand (plus_constant (Pmode, tem, crtl->args.pretend_args_size),
1459 NULL_RTX);
1460 #endif
1461 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1462
1463 size = GET_MODE_SIZE (Pmode);
1464
1465 /* Save the structure value address unless this is passed as an
1466 "invisible" first argument. */
1467 if (struct_incoming_value)
1468 {
1469 emit_move_insn (adjust_address (registers, Pmode, size),
1470 copy_to_reg (struct_incoming_value));
1471 size += GET_MODE_SIZE (Pmode);
1472 }
1473
1474 /* Return the address of the block. */
1475 return copy_addr_to_reg (XEXP (registers, 0));
1476 }
1477
1478 /* __builtin_apply_args returns block of memory allocated on
1479 the stack into which is stored the arg pointer, structure
1480 value address, static chain, and all the registers that might
1481 possibly be used in performing a function call. The code is
1482 moved to the start of the function so the incoming values are
1483 saved. */
1484
1485 static rtx
1486 expand_builtin_apply_args (void)
1487 {
1488 /* Don't do __builtin_apply_args more than once in a function.
1489 Save the result of the first call and reuse it. */
1490 if (apply_args_value != 0)
1491 return apply_args_value;
1492 {
1493 /* When this function is called, it means that registers must be
1494 saved on entry to this function. So we migrate the
1495 call to the first insn of this function. */
1496 rtx temp;
1497 rtx seq;
1498
1499 start_sequence ();
1500 temp = expand_builtin_apply_args_1 ();
1501 seq = get_insns ();
1502 end_sequence ();
1503
1504 apply_args_value = temp;
1505
1506 /* Put the insns after the NOTE that starts the function.
1507 If this is inside a start_sequence, make the outer-level insn
1508 chain current, so the code is placed at the start of the
1509 function. If internal_arg_pointer is a non-virtual pseudo,
1510 it needs to be placed after the function that initializes
1511 that pseudo. */
1512 push_topmost_sequence ();
1513 if (REG_P (crtl->args.internal_arg_pointer)
1514 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1515 emit_insn_before (seq, parm_birth_insn);
1516 else
1517 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1518 pop_topmost_sequence ();
1519 return temp;
1520 }
1521 }
1522
1523 /* Perform an untyped call and save the state required to perform an
1524 untyped return of whatever value was returned by the given function. */
1525
1526 static rtx
1527 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1528 {
1529 int size, align, regno;
1530 enum machine_mode mode;
1531 rtx incoming_args, result, reg, dest, src, call_insn;
1532 rtx old_stack_level = 0;
1533 rtx call_fusage = 0;
1534 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1535
1536 arguments = convert_memory_address (Pmode, arguments);
1537
1538 /* Create a block where the return registers can be saved. */
1539 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1540
1541 /* Fetch the arg pointer from the ARGUMENTS block. */
1542 incoming_args = gen_reg_rtx (Pmode);
1543 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1544 #ifndef STACK_GROWS_DOWNWARD
1545 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1546 incoming_args, 0, OPTAB_LIB_WIDEN);
1547 #endif
1548
1549 /* Push a new argument block and copy the arguments. Do not allow
1550 the (potential) memcpy call below to interfere with our stack
1551 manipulations. */
1552 do_pending_stack_adjust ();
1553 NO_DEFER_POP;
1554
1555 /* Save the stack with nonlocal if available. */
1556 #ifdef HAVE_save_stack_nonlocal
1557 if (HAVE_save_stack_nonlocal)
1558 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1559 else
1560 #endif
1561 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1562
1563 /* Allocate a block of memory onto the stack and copy the memory
1564 arguments to the outgoing arguments address. We can pass TRUE
1565 as the 4th argument because we just saved the stack pointer
1566 and will restore it right after the call. */
1567 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1568
1569 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1570 may have already set current_function_calls_alloca to true.
1571 current_function_calls_alloca won't be set if argsize is zero,
1572 so we have to guarantee need_drap is true here. */
1573 if (SUPPORTS_STACK_ALIGNMENT)
1574 crtl->need_drap = true;
1575
1576 dest = virtual_outgoing_args_rtx;
1577 #ifndef STACK_GROWS_DOWNWARD
1578 if (CONST_INT_P (argsize))
1579 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1580 else
1581 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1582 #endif
1583 dest = gen_rtx_MEM (BLKmode, dest);
1584 set_mem_align (dest, PARM_BOUNDARY);
1585 src = gen_rtx_MEM (BLKmode, incoming_args);
1586 set_mem_align (src, PARM_BOUNDARY);
1587 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1588
1589 /* Refer to the argument block. */
1590 apply_args_size ();
1591 arguments = gen_rtx_MEM (BLKmode, arguments);
1592 set_mem_align (arguments, PARM_BOUNDARY);
1593
1594 /* Walk past the arg-pointer and structure value address. */
1595 size = GET_MODE_SIZE (Pmode);
1596 if (struct_value)
1597 size += GET_MODE_SIZE (Pmode);
1598
1599 /* Restore each of the registers previously saved. Make USE insns
1600 for each of these registers for use in making the call. */
1601 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1602 if ((mode = apply_args_mode[regno]) != VOIDmode)
1603 {
1604 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1605 if (size % align != 0)
1606 size = CEIL (size, align) * align;
1607 reg = gen_rtx_REG (mode, regno);
1608 emit_move_insn (reg, adjust_address (arguments, mode, size));
1609 use_reg (&call_fusage, reg);
1610 size += GET_MODE_SIZE (mode);
1611 }
1612
1613 /* Restore the structure value address unless this is passed as an
1614 "invisible" first argument. */
1615 size = GET_MODE_SIZE (Pmode);
1616 if (struct_value)
1617 {
1618 rtx value = gen_reg_rtx (Pmode);
1619 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1620 emit_move_insn (struct_value, value);
1621 if (REG_P (struct_value))
1622 use_reg (&call_fusage, struct_value);
1623 size += GET_MODE_SIZE (Pmode);
1624 }
1625
1626 /* All arguments and registers used for the call are set up by now! */
1627 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1628
1629 /* Ensure the address is valid. A SYMBOL_REF is already valid, so no
1630 work is needed, and we don't want to load it into a register as an
1631 optimization, because prepare_call_address already did so if needed. */
1632 if (GET_CODE (function) != SYMBOL_REF)
1633 function = memory_address (FUNCTION_MODE, function);
1634
1635 /* Generate the actual call instruction and save the return value. */
1636 #ifdef HAVE_untyped_call
1637 if (HAVE_untyped_call)
1638 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1639 result, result_vector (1, result)));
1640 else
1641 #endif
1642 #ifdef HAVE_call_value
1643 if (HAVE_call_value)
1644 {
1645 rtx valreg = 0;
1646
1647 /* Locate the unique return register. It is not possible to
1648 express a call that sets more than one return register using
1649 call_value; use untyped_call for that. In fact, untyped_call
1650 only needs to save the return registers in the given block. */
1651 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1652 if ((mode = apply_result_mode[regno]) != VOIDmode)
1653 {
1654 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1655
1656 valreg = gen_rtx_REG (mode, regno);
1657 }
1658
1659 emit_call_insn (GEN_CALL_VALUE (valreg,
1660 gen_rtx_MEM (FUNCTION_MODE, function),
1661 const0_rtx, NULL_RTX, const0_rtx));
1662
1663 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1664 }
1665 else
1666 #endif
1667 gcc_unreachable ();
1668
1669 /* Find the CALL insn we just emitted, and attach the register usage
1670 information. */
1671 call_insn = last_call_insn ();
1672 add_function_usage_to (call_insn, call_fusage);
1673
1674 /* Restore the stack. */
1675 #ifdef HAVE_save_stack_nonlocal
1676 if (HAVE_save_stack_nonlocal)
1677 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1678 else
1679 #endif
1680 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1681 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1682
1683 OK_DEFER_POP;
1684
1685 /* Return the address of the result block. */
1686 result = copy_addr_to_reg (XEXP (result, 0));
1687 return convert_memory_address (ptr_mode, result);
1688 }
1689
1690 /* Perform an untyped return. */
1691
1692 static void
1693 expand_builtin_return (rtx result)
1694 {
1695 int size, align, regno;
1696 enum machine_mode mode;
1697 rtx reg;
1698 rtx call_fusage = 0;
1699
1700 result = convert_memory_address (Pmode, result);
1701
1702 apply_result_size ();
1703 result = gen_rtx_MEM (BLKmode, result);
1704
1705 #ifdef HAVE_untyped_return
1706 if (HAVE_untyped_return)
1707 {
1708 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1709 emit_barrier ();
1710 return;
1711 }
1712 #endif
1713
1714 /* Restore the return value and note that each value is used. */
1715 size = 0;
1716 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1717 if ((mode = apply_result_mode[regno]) != VOIDmode)
1718 {
1719 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1720 if (size % align != 0)
1721 size = CEIL (size, align) * align;
1722 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1723 emit_move_insn (reg, adjust_address (result, mode, size));
1724
1725 push_to_sequence (call_fusage);
1726 emit_use (reg);
1727 call_fusage = get_insns ();
1728 end_sequence ();
1729 size += GET_MODE_SIZE (mode);
1730 }
1731
1732 /* Put the USE insns before the return. */
1733 emit_insn (call_fusage);
1734
1735 /* Return whatever value was restored by jumping directly to the end
1736 of the function. */
1737 expand_naked_return ();
1738 }
1739
1740 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1741
1742 static enum type_class
1743 type_to_class (tree type)
1744 {
1745 switch (TREE_CODE (type))
1746 {
1747 case VOID_TYPE: return void_type_class;
1748 case INTEGER_TYPE: return integer_type_class;
1749 case ENUMERAL_TYPE: return enumeral_type_class;
1750 case BOOLEAN_TYPE: return boolean_type_class;
1751 case POINTER_TYPE: return pointer_type_class;
1752 case REFERENCE_TYPE: return reference_type_class;
1753 case OFFSET_TYPE: return offset_type_class;
1754 case REAL_TYPE: return real_type_class;
1755 case COMPLEX_TYPE: return complex_type_class;
1756 case FUNCTION_TYPE: return function_type_class;
1757 case METHOD_TYPE: return method_type_class;
1758 case RECORD_TYPE: return record_type_class;
1759 case UNION_TYPE:
1760 case QUAL_UNION_TYPE: return union_type_class;
1761 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1762 ? string_type_class : array_type_class);
1763 case LANG_TYPE: return lang_type_class;
1764 default: return no_type_class;
1765 }
1766 }
1767
1768 /* Expand a call EXP to __builtin_classify_type. */
1769
1770 static rtx
1771 expand_builtin_classify_type (tree exp)
1772 {
1773 if (call_expr_nargs (exp))
1774 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1775 return GEN_INT (no_type_class);
1776 }
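/* E.g. __builtin_classify_type (3.14) expands to the constant
   real_type_class, and __builtin_classify_type ("abc") to
   pointer_type_class, since an array argument decays to a pointer
   in the call. */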
1777
1778 /* This helper macro, meant to be used in mathfn_built_in_1 below,
1779 determines which among a set of three builtin math functions is
1780 appropriate for a given type mode. The `F' and `L' cases are
1781 automatically generated from the `double' case. */
1782 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1783 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1784 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1785 fcodel = BUILT_IN_MATHFN##L ; break;
1786 /* Similar to above, but appends _R after any F/L suffix. */
1787 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1788 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1789 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1790 fcodel = BUILT_IN_MATHFN##L_R ; break;
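/* For instance, CASE_MATHFN (BUILT_IN_SIN) expands to

     case BUILT_IN_SIN: case BUILT_IN_SINF: case BUILT_IN_SINL:
       fcode = BUILT_IN_SIN; fcodef = BUILT_IN_SINF;
       fcodel = BUILT_IN_SINL; break;  */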
1791
1792 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
1793 if available. If IMPLICIT is true use the implicit builtin declaration,
1794 otherwise use the explicit declaration. If we can't do the conversion,
1795 return zero. */
1796
1797 static tree
1798 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
1799 {
1800 enum built_in_function fcode, fcodef, fcodel, fcode2;
1801
1802 switch (fn)
1803 {
1804 CASE_MATHFN (BUILT_IN_ACOS)
1805 CASE_MATHFN (BUILT_IN_ACOSH)
1806 CASE_MATHFN (BUILT_IN_ASIN)
1807 CASE_MATHFN (BUILT_IN_ASINH)
1808 CASE_MATHFN (BUILT_IN_ATAN)
1809 CASE_MATHFN (BUILT_IN_ATAN2)
1810 CASE_MATHFN (BUILT_IN_ATANH)
1811 CASE_MATHFN (BUILT_IN_CBRT)
1812 CASE_MATHFN (BUILT_IN_CEIL)
1813 CASE_MATHFN (BUILT_IN_CEXPI)
1814 CASE_MATHFN (BUILT_IN_COPYSIGN)
1815 CASE_MATHFN (BUILT_IN_COS)
1816 CASE_MATHFN (BUILT_IN_COSH)
1817 CASE_MATHFN (BUILT_IN_DREM)
1818 CASE_MATHFN (BUILT_IN_ERF)
1819 CASE_MATHFN (BUILT_IN_ERFC)
1820 CASE_MATHFN (BUILT_IN_EXP)
1821 CASE_MATHFN (BUILT_IN_EXP10)
1822 CASE_MATHFN (BUILT_IN_EXP2)
1823 CASE_MATHFN (BUILT_IN_EXPM1)
1824 CASE_MATHFN (BUILT_IN_FABS)
1825 CASE_MATHFN (BUILT_IN_FDIM)
1826 CASE_MATHFN (BUILT_IN_FLOOR)
1827 CASE_MATHFN (BUILT_IN_FMA)
1828 CASE_MATHFN (BUILT_IN_FMAX)
1829 CASE_MATHFN (BUILT_IN_FMIN)
1830 CASE_MATHFN (BUILT_IN_FMOD)
1831 CASE_MATHFN (BUILT_IN_FREXP)
1832 CASE_MATHFN (BUILT_IN_GAMMA)
1833 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1834 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1835 CASE_MATHFN (BUILT_IN_HYPOT)
1836 CASE_MATHFN (BUILT_IN_ILOGB)
1837 CASE_MATHFN (BUILT_IN_ICEIL)
1838 CASE_MATHFN (BUILT_IN_IFLOOR)
1839 CASE_MATHFN (BUILT_IN_INF)
1840 CASE_MATHFN (BUILT_IN_IRINT)
1841 CASE_MATHFN (BUILT_IN_IROUND)
1842 CASE_MATHFN (BUILT_IN_ISINF)
1843 CASE_MATHFN (BUILT_IN_J0)
1844 CASE_MATHFN (BUILT_IN_J1)
1845 CASE_MATHFN (BUILT_IN_JN)
1846 CASE_MATHFN (BUILT_IN_LCEIL)
1847 CASE_MATHFN (BUILT_IN_LDEXP)
1848 CASE_MATHFN (BUILT_IN_LFLOOR)
1849 CASE_MATHFN (BUILT_IN_LGAMMA)
1850 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1851 CASE_MATHFN (BUILT_IN_LLCEIL)
1852 CASE_MATHFN (BUILT_IN_LLFLOOR)
1853 CASE_MATHFN (BUILT_IN_LLRINT)
1854 CASE_MATHFN (BUILT_IN_LLROUND)
1855 CASE_MATHFN (BUILT_IN_LOG)
1856 CASE_MATHFN (BUILT_IN_LOG10)
1857 CASE_MATHFN (BUILT_IN_LOG1P)
1858 CASE_MATHFN (BUILT_IN_LOG2)
1859 CASE_MATHFN (BUILT_IN_LOGB)
1860 CASE_MATHFN (BUILT_IN_LRINT)
1861 CASE_MATHFN (BUILT_IN_LROUND)
1862 CASE_MATHFN (BUILT_IN_MODF)
1863 CASE_MATHFN (BUILT_IN_NAN)
1864 CASE_MATHFN (BUILT_IN_NANS)
1865 CASE_MATHFN (BUILT_IN_NEARBYINT)
1866 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1867 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1868 CASE_MATHFN (BUILT_IN_POW)
1869 CASE_MATHFN (BUILT_IN_POWI)
1870 CASE_MATHFN (BUILT_IN_POW10)
1871 CASE_MATHFN (BUILT_IN_REMAINDER)
1872 CASE_MATHFN (BUILT_IN_REMQUO)
1873 CASE_MATHFN (BUILT_IN_RINT)
1874 CASE_MATHFN (BUILT_IN_ROUND)
1875 CASE_MATHFN (BUILT_IN_SCALB)
1876 CASE_MATHFN (BUILT_IN_SCALBLN)
1877 CASE_MATHFN (BUILT_IN_SCALBN)
1878 CASE_MATHFN (BUILT_IN_SIGNBIT)
1879 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1880 CASE_MATHFN (BUILT_IN_SIN)
1881 CASE_MATHFN (BUILT_IN_SINCOS)
1882 CASE_MATHFN (BUILT_IN_SINH)
1883 CASE_MATHFN (BUILT_IN_SQRT)
1884 CASE_MATHFN (BUILT_IN_TAN)
1885 CASE_MATHFN (BUILT_IN_TANH)
1886 CASE_MATHFN (BUILT_IN_TGAMMA)
1887 CASE_MATHFN (BUILT_IN_TRUNC)
1888 CASE_MATHFN (BUILT_IN_Y0)
1889 CASE_MATHFN (BUILT_IN_Y1)
1890 CASE_MATHFN (BUILT_IN_YN)
1891
1892 default:
1893 return NULL_TREE;
1894 }
1895
1896 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1897 fcode2 = fcode;
1898 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1899 fcode2 = fcodef;
1900 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1901 fcode2 = fcodel;
1902 else
1903 return NULL_TREE;
1904
1905 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1906 return NULL_TREE;
1907
1908 return builtin_decl_explicit (fcode2);
1909 }
1910
1911 /* Like mathfn_built_in_1(), but always use the implicit builtin declarations. */
1912
1913 tree
1914 mathfn_built_in (tree type, enum built_in_function fn)
1915 {
1916 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1917 }
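/* E.g. mathfn_built_in (float_type_node, BUILT_IN_SIN) returns the
   decl for sinf, or NULL_TREE if sinf's implicit builtin declaration
   is not available. */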
1918
1919 /* If errno must be maintained, expand the RTL to check if the result,
1920 TARGET, of a built-in function call, EXP, is NaN, and if so set
1921 errno to EDOM. */
1922
1923 static void
1924 expand_errno_check (tree exp, rtx target)
1925 {
1926 rtx lab = gen_label_rtx ();
1927
1928 /* Test the result; if it is NaN, set errno=EDOM because
1929 the argument was not in the domain. */
1930 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1931 NULL_RTX, NULL_RTX, lab,
1932 /* The jump is very likely. */
1933 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
1934
1935 #ifdef TARGET_EDOM
1936 /* If this built-in doesn't throw an exception, set errno directly. */
1937 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1938 {
1939 #ifdef GEN_ERRNO_RTX
1940 rtx errno_rtx = GEN_ERRNO_RTX;
1941 #else
1942 rtx errno_rtx
1943 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1944 #endif
1945 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1946 emit_label (lab);
1947 return;
1948 }
1949 #endif
1950
1951 /* Make sure the library call isn't expanded as a tail call. */
1952 CALL_EXPR_TAILCALL (exp) = 0;
1953
1954 /* We can't set errno=EDOM directly; let the library call do it.
1955 Pop the arguments right away in case the call gets deleted. */
1956 NO_DEFER_POP;
1957 expand_call (exp, target, 0);
1958 OK_DEFER_POP;
1959 emit_label (lab);
1960 }
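/* The self-comparison above relies on the IEEE rule that x == x is
   false exactly when x is NaN; e.g. for log (-1.0), which yields NaN,
   the branch to LAB is not taken and errno is set to EDOM. */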
1961
1962 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1963 Return NULL_RTX if a normal call should be emitted rather than expanding
1964 the function in-line. EXP is the expression that is a call to the builtin
1965 function; if convenient, the result should be placed in TARGET.
1966 SUBTARGET may be used as the target for computing one of EXP's operands. */
1967
1968 static rtx
1969 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1970 {
1971 optab builtin_optab;
1972 rtx op0, insns;
1973 tree fndecl = get_callee_fndecl (exp);
1974 enum machine_mode mode;
1975 bool errno_set = false;
1976 bool try_widening = false;
1977 tree arg;
1978
1979 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1980 return NULL_RTX;
1981
1982 arg = CALL_EXPR_ARG (exp, 0);
1983
1984 switch (DECL_FUNCTION_CODE (fndecl))
1985 {
1986 CASE_FLT_FN (BUILT_IN_SQRT):
1987 errno_set = ! tree_expr_nonnegative_p (arg);
1988 try_widening = true;
1989 builtin_optab = sqrt_optab;
1990 break;
1991 CASE_FLT_FN (BUILT_IN_EXP):
1992 errno_set = true; builtin_optab = exp_optab; break;
1993 CASE_FLT_FN (BUILT_IN_EXP10):
1994 CASE_FLT_FN (BUILT_IN_POW10):
1995 errno_set = true; builtin_optab = exp10_optab; break;
1996 CASE_FLT_FN (BUILT_IN_EXP2):
1997 errno_set = true; builtin_optab = exp2_optab; break;
1998 CASE_FLT_FN (BUILT_IN_EXPM1):
1999 errno_set = true; builtin_optab = expm1_optab; break;
2000 CASE_FLT_FN (BUILT_IN_LOGB):
2001 errno_set = true; builtin_optab = logb_optab; break;
2002 CASE_FLT_FN (BUILT_IN_LOG):
2003 errno_set = true; builtin_optab = log_optab; break;
2004 CASE_FLT_FN (BUILT_IN_LOG10):
2005 errno_set = true; builtin_optab = log10_optab; break;
2006 CASE_FLT_FN (BUILT_IN_LOG2):
2007 errno_set = true; builtin_optab = log2_optab; break;
2008 CASE_FLT_FN (BUILT_IN_LOG1P):
2009 errno_set = true; builtin_optab = log1p_optab; break;
2010 CASE_FLT_FN (BUILT_IN_ASIN):
2011 builtin_optab = asin_optab; break;
2012 CASE_FLT_FN (BUILT_IN_ACOS):
2013 builtin_optab = acos_optab; break;
2014 CASE_FLT_FN (BUILT_IN_TAN):
2015 builtin_optab = tan_optab; break;
2016 CASE_FLT_FN (BUILT_IN_ATAN):
2017 builtin_optab = atan_optab; break;
2018 CASE_FLT_FN (BUILT_IN_FLOOR):
2019 builtin_optab = floor_optab; break;
2020 CASE_FLT_FN (BUILT_IN_CEIL):
2021 builtin_optab = ceil_optab; break;
2022 CASE_FLT_FN (BUILT_IN_TRUNC):
2023 builtin_optab = btrunc_optab; break;
2024 CASE_FLT_FN (BUILT_IN_ROUND):
2025 builtin_optab = round_optab; break;
2026 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2027 builtin_optab = nearbyint_optab;
2028 if (flag_trapping_math)
2029 break;
2030 /* Else fallthrough and expand as rint. */
2031 CASE_FLT_FN (BUILT_IN_RINT):
2032 builtin_optab = rint_optab; break;
2033 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2034 builtin_optab = significand_optab; break;
2035 default:
2036 gcc_unreachable ();
2037 }
2038
2039 /* Make a suitable register to place result in. */
2040 mode = TYPE_MODE (TREE_TYPE (exp));
2041
2042 if (! flag_errno_math || ! HONOR_NANS (mode))
2043 errno_set = false;
2044
2045 /* Before working hard, check whether the instruction is available, but try
2046 to widen the mode for specific operations. */
2047 if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2048 || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
2049 && (!errno_set || !optimize_insn_for_size_p ()))
2050 {
2051 rtx result = gen_reg_rtx (mode);
2052
2053 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2054 need to expand the argument again. This way, we will not perform
2055 side-effects more than once. */
2056 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2057
2058 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2059
2060 start_sequence ();
2061
2062 /* Compute into RESULT.
2063 Set RESULT to wherever the result comes back. */
2064 result = expand_unop (mode, builtin_optab, op0, result, 0);
2065
2066 if (result != 0)
2067 {
2068 if (errno_set)
2069 expand_errno_check (exp, result);
2070
2071 /* Output the entire sequence. */
2072 insns = get_insns ();
2073 end_sequence ();
2074 emit_insn (insns);
2075 return result;
2076 }
2077
2078 /* If we were unable to expand via the builtin, stop the sequence
2079 (without outputting the insns) and call the library function
2080 with the stabilized argument list. */
2081 end_sequence ();
2082 }
2083
2084 return expand_call (exp, target, target == const0_rtx);
2085 }
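/* The builtin_save_expr above matters on the fallback path: for, say,
   sqrt (f ()), the argument may be expanded once for the insn attempt
   and again for the library call, yet f must be called only once. */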
2086
2087 /* Expand a call to the builtin binary math functions (pow and atan2).
2088 Return NULL_RTX if a normal call should be emitted rather than expanding the
2089 function in-line. EXP is the expression that is a call to the builtin
2090 function; if convenient, the result should be placed in TARGET.
2091 SUBTARGET may be used as the target for computing one of EXP's
2092 operands. */
2093
2094 static rtx
2095 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2096 {
2097 optab builtin_optab;
2098 rtx op0, op1, insns, result;
2099 int op1_type = REAL_TYPE;
2100 tree fndecl = get_callee_fndecl (exp);
2101 tree arg0, arg1;
2102 enum machine_mode mode;
2103 bool errno_set = true;
2104
2105 switch (DECL_FUNCTION_CODE (fndecl))
2106 {
2107 CASE_FLT_FN (BUILT_IN_SCALBN):
2108 CASE_FLT_FN (BUILT_IN_SCALBLN):
2109 CASE_FLT_FN (BUILT_IN_LDEXP):
2110 op1_type = INTEGER_TYPE;
2111 default:
2112 break;
2113 }
2114
2115 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2116 return NULL_RTX;
2117
2118 arg0 = CALL_EXPR_ARG (exp, 0);
2119 arg1 = CALL_EXPR_ARG (exp, 1);
2120
2121 switch (DECL_FUNCTION_CODE (fndecl))
2122 {
2123 CASE_FLT_FN (BUILT_IN_POW):
2124 builtin_optab = pow_optab; break;
2125 CASE_FLT_FN (BUILT_IN_ATAN2):
2126 builtin_optab = atan2_optab; break;
2127 CASE_FLT_FN (BUILT_IN_SCALB):
2128 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2129 return 0;
2130 builtin_optab = scalb_optab; break;
2131 CASE_FLT_FN (BUILT_IN_SCALBN):
2132 CASE_FLT_FN (BUILT_IN_SCALBLN):
2133 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2134 return 0;
2135 /* Fall through... */
2136 CASE_FLT_FN (BUILT_IN_LDEXP):
2137 builtin_optab = ldexp_optab; break;
2138 CASE_FLT_FN (BUILT_IN_FMOD):
2139 builtin_optab = fmod_optab; break;
2140 CASE_FLT_FN (BUILT_IN_REMAINDER):
2141 CASE_FLT_FN (BUILT_IN_DREM):
2142 builtin_optab = remainder_optab; break;
2143 default:
2144 gcc_unreachable ();
2145 }
2146
2147 /* Make a suitable register to place result in. */
2148 mode = TYPE_MODE (TREE_TYPE (exp));
2149
2150 /* Before working hard, check whether the instruction is available. */
2151 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2152 return NULL_RTX;
2153
2154 result = gen_reg_rtx (mode);
2155
2156 if (! flag_errno_math || ! HONOR_NANS (mode))
2157 errno_set = false;
2158
2159 if (errno_set && optimize_insn_for_size_p ())
2160 return 0;
2161
2162 /* Always stabilize the argument list. */
2163 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2164 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2165
2166 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2167 op1 = expand_normal (arg1);
2168
2169 start_sequence ();
2170
2171 /* Compute into RESULT.
2172 Set RESULT to wherever the result comes back. */
2173 result = expand_binop (mode, builtin_optab, op0, op1,
2174 result, 0, OPTAB_DIRECT);
2175
2176 /* If we were unable to expand via the builtin, stop the sequence
2177 (without outputting the insns) and call the library function
2178 with the stabilized argument list. */
2179 if (result == 0)
2180 {
2181 end_sequence ();
2182 return expand_call (exp, target, target == const0_rtx);
2183 }
2184
2185 if (errno_set)
2186 expand_errno_check (exp, result);
2187
2188 /* Output the entire sequence. */
2189 insns = get_insns ();
2190 end_sequence ();
2191 emit_insn (insns);
2192
2193 return result;
2194 }
2195
2196 /* Expand a call to the builtin ternary math functions (fma).
2197 Return NULL_RTX if a normal call should be emitted rather than expanding the
2198 function in-line. EXP is the expression that is a call to the builtin
2199 function; if convenient, the result should be placed in TARGET.
2200 SUBTARGET may be used as the target for computing one of EXP's
2201 operands. */
2202
2203 static rtx
2204 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2205 {
2206 optab builtin_optab;
2207 rtx op0, op1, op2, insns, result;
2208 tree fndecl = get_callee_fndecl (exp);
2209 tree arg0, arg1, arg2;
2210 enum machine_mode mode;
2211
2212 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2213 return NULL_RTX;
2214
2215 arg0 = CALL_EXPR_ARG (exp, 0);
2216 arg1 = CALL_EXPR_ARG (exp, 1);
2217 arg2 = CALL_EXPR_ARG (exp, 2);
2218
2219 switch (DECL_FUNCTION_CODE (fndecl))
2220 {
2221 CASE_FLT_FN (BUILT_IN_FMA):
2222 builtin_optab = fma_optab; break;
2223 default:
2224 gcc_unreachable ();
2225 }
2226
2227 /* Make a suitable register to place result in. */
2228 mode = TYPE_MODE (TREE_TYPE (exp));
2229
2230 /* Before working hard, check whether the instruction is available. */
2231 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2232 return NULL_RTX;
2233
2234 result = gen_reg_rtx (mode);
2235
2236 /* Always stabilize the argument list. */
2237 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2238 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2239 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2240
2241 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2242 op1 = expand_normal (arg1);
2243 op2 = expand_normal (arg2);
2244
2245 start_sequence ();
2246
2247 /* Compute into RESULT.
2248 Set RESULT to wherever the result comes back. */
2249 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2250 result, 0);
2251
2252 /* If we were unable to expand via the builtin, stop the sequence
2253 (without outputting the insns) and call the library function
2254 with the stabilized argument list. */
2255 if (result == 0)
2256 {
2257 end_sequence ();
2258 return expand_call (exp, target, target == const0_rtx);
2259 }
2260
2261 /* Output the entire sequence. */
2262 insns = get_insns ();
2263 end_sequence ();
2264 emit_insn (insns);
2265
2266 return result;
2267 }
2268
2269 /* Expand a call to the builtin sin and cos math functions.
2270 Return NULL_RTX if a normal call should be emitted rather than expanding the
2271 function in-line. EXP is the expression that is a call to the builtin
2272 function; if convenient, the result should be placed in TARGET.
2273 SUBTARGET may be used as the target for computing one of EXP's
2274 operands. */
2275
2276 static rtx
2277 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2278 {
2279 optab builtin_optab;
2280 rtx op0, insns;
2281 tree fndecl = get_callee_fndecl (exp);
2282 enum machine_mode mode;
2283 tree arg;
2284
2285 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2286 return NULL_RTX;
2287
2288 arg = CALL_EXPR_ARG (exp, 0);
2289
2290 switch (DECL_FUNCTION_CODE (fndecl))
2291 {
2292 CASE_FLT_FN (BUILT_IN_SIN):
2293 CASE_FLT_FN (BUILT_IN_COS):
2294 builtin_optab = sincos_optab; break;
2295 default:
2296 gcc_unreachable ();
2297 }
2298
2299 /* Make a suitable register to place result in. */
2300 mode = TYPE_MODE (TREE_TYPE (exp));
2301
2302 /* Check if the sincos insn is available; otherwise fall back
2303 to the sin or cos insn. */
2304 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2305 switch (DECL_FUNCTION_CODE (fndecl))
2306 {
2307 CASE_FLT_FN (BUILT_IN_SIN):
2308 builtin_optab = sin_optab; break;
2309 CASE_FLT_FN (BUILT_IN_COS):
2310 builtin_optab = cos_optab; break;
2311 default:
2312 gcc_unreachable ();
2313 }
2314
2315 /* Before working hard, check whether the instruction is available. */
2316 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2317 {
2318 rtx result = gen_reg_rtx (mode);
2319
2320 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2321 need to expand the argument again. This way, we will not perform
2322 side-effects more than once. */
2323 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2324
2325 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2326
2327 start_sequence ();
2328
2329 /* Compute into RESULT.
2330 Set RESULT to wherever the result comes back. */
2331 if (builtin_optab == sincos_optab)
2332 {
2333 int ok;
2334
2335 switch (DECL_FUNCTION_CODE (fndecl))
2336 {
2337 CASE_FLT_FN (BUILT_IN_SIN):
2338 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2339 break;
2340 CASE_FLT_FN (BUILT_IN_COS):
2341 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2342 break;
2343 default:
2344 gcc_unreachable ();
2345 }
2346 gcc_assert (ok);
2347 }
2348 else
2349 result = expand_unop (mode, builtin_optab, op0, result, 0);
2350
2351 if (result != 0)
2352 {
2353 /* Output the entire sequence. */
2354 insns = get_insns ();
2355 end_sequence ();
2356 emit_insn (insns);
2357 return result;
2358 }
2359
2360 /* If we were unable to expand via the builtin, stop the sequence
2361 (without outputting the insns) and call the library function
2362 with the stabilized argument list. */
2363 end_sequence ();
2364 }
2365
2366 return expand_call (exp, target, target == const0_rtx);
2367 }
2368
2369 /* Given an interclass math builtin decl FNDECL and its argument ARG,
2370 return an RTL instruction code that implements the functionality.
2371 If that isn't possible or available return CODE_FOR_nothing. */
2372
2373 static enum insn_code
2374 interclass_mathfn_icode (tree arg, tree fndecl)
2375 {
2376 bool errno_set = false;
2377 optab builtin_optab = unknown_optab;
2378 enum machine_mode mode;
2379
2380 switch (DECL_FUNCTION_CODE (fndecl))
2381 {
2382 CASE_FLT_FN (BUILT_IN_ILOGB):
2383 errno_set = true; builtin_optab = ilogb_optab; break;
2384 CASE_FLT_FN (BUILT_IN_ISINF):
2385 builtin_optab = isinf_optab; break;
2386 case BUILT_IN_ISNORMAL:
2387 case BUILT_IN_ISFINITE:
2388 CASE_FLT_FN (BUILT_IN_FINITE):
2389 case BUILT_IN_FINITED32:
2390 case BUILT_IN_FINITED64:
2391 case BUILT_IN_FINITED128:
2392 case BUILT_IN_ISINFD32:
2393 case BUILT_IN_ISINFD64:
2394 case BUILT_IN_ISINFD128:
2395 /* These builtins have no optabs (yet). */
2396 break;
2397 default:
2398 gcc_unreachable ();
2399 }
2400
2401 /* There's no easy way to detect the case we need to set EDOM. */
2402 if (flag_errno_math && errno_set)
2403 return CODE_FOR_nothing;
2404
2405 /* Optab mode depends on the mode of the input argument. */
2406 mode = TYPE_MODE (TREE_TYPE (arg));
2407
2408 if (builtin_optab)
2409 return optab_handler (builtin_optab, mode);
2410 return CODE_FOR_nothing;
2411 }
2412
2413 /* Expand a call to one of the builtin math functions that operate on a
2414 floating point argument and produce an integer result (ilogb, isinf,
2415 isnan, etc.).
2416 Return 0 if a normal call should be emitted rather than expanding the
2417 function in-line. EXP is the expression that is a call to the builtin
2418 function; if convenient, the result should be placed in TARGET. */
2419
2420 static rtx
2421 expand_builtin_interclass_mathfn (tree exp, rtx target)
2422 {
2423 enum insn_code icode = CODE_FOR_nothing;
2424 rtx op0;
2425 tree fndecl = get_callee_fndecl (exp);
2426 enum machine_mode mode;
2427 tree arg;
2428
2429 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2430 return NULL_RTX;
2431
2432 arg = CALL_EXPR_ARG (exp, 0);
2433 icode = interclass_mathfn_icode (arg, fndecl);
2434 mode = TYPE_MODE (TREE_TYPE (arg));
2435
2436 if (icode != CODE_FOR_nothing)
2437 {
2438 struct expand_operand ops[1];
2439 rtx last = get_last_insn ();
2440 tree orig_arg = arg;
2441
2442 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2443 need to expand the argument again. This way, we will not perform
2444 side-effects more than once. */
2445 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2446
2447 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2448
2449 if (mode != GET_MODE (op0))
2450 op0 = convert_to_mode (mode, op0, 0);
2451
2452 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2453 if (maybe_legitimize_operands (icode, 0, 1, ops)
2454 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2455 return ops[0].value;
2456
2457 delete_insns_since (last);
2458 CALL_EXPR_ARG (exp, 0) = orig_arg;
2459 }
2460
2461 return NULL_RTX;
2462 }
2463
2464 /* Expand a call to the builtin sincos math function.
2465 Return NULL_RTX if a normal call should be emitted rather than expanding the
2466 function in-line. EXP is the expression that is a call to the builtin
2467 function. */
2468
2469 static rtx
2470 expand_builtin_sincos (tree exp)
2471 {
2472 rtx op0, op1, op2, target1, target2;
2473 enum machine_mode mode;
2474 tree arg, sinp, cosp;
2475 int result;
2476 location_t loc = EXPR_LOCATION (exp);
2477 tree alias_type, alias_off;
2478
2479 if (!validate_arglist (exp, REAL_TYPE,
2480 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2481 return NULL_RTX;
2482
2483 arg = CALL_EXPR_ARG (exp, 0);
2484 sinp = CALL_EXPR_ARG (exp, 1);
2485 cosp = CALL_EXPR_ARG (exp, 2);
2486
2487 /* Make a suitable register to place result in. */
2488 mode = TYPE_MODE (TREE_TYPE (arg));
2489
2490 /* Check if sincos insn is available, otherwise emit the call. */
2491 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2492 return NULL_RTX;
2493
2494 target1 = gen_reg_rtx (mode);
2495 target2 = gen_reg_rtx (mode);
2496
2497 op0 = expand_normal (arg);
2498 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2499 alias_off = build_int_cst (alias_type, 0);
2500 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2501 sinp, alias_off));
2502 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2503 cosp, alias_off));
2504
2505 /* Compute into target1 and target2.
2506 Set TARGET to wherever the result comes back. */
2507 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2508 gcc_assert (result);
2509
2510 /* Move target1 and target2 to the memory locations indicated
2511 by op1 and op2. */
2512 emit_move_insn (op1, target1);
2513 emit_move_insn (op2, target2);
2514
2515 return const0_rtx;
2516 }
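/* In other words, a call such as

     sincos (x, &s, &c);

   is expanded into a single two-output sincos insn followed by moves
   of the two results into the locations S and C point to. */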
2517
2518 /* Expand a call to the internal cexpi builtin to the sincos math function.
2519 EXP is the expression that is a call to the builtin function; if convenient,
2520 the result should be placed in TARGET. */
2521
2522 static rtx
2523 expand_builtin_cexpi (tree exp, rtx target)
2524 {
2525 tree fndecl = get_callee_fndecl (exp);
2526 tree arg, type;
2527 enum machine_mode mode;
2528 rtx op0, op1, op2;
2529 location_t loc = EXPR_LOCATION (exp);
2530
2531 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2532 return NULL_RTX;
2533
2534 arg = CALL_EXPR_ARG (exp, 0);
2535 type = TREE_TYPE (arg);
2536 mode = TYPE_MODE (TREE_TYPE (arg));
2537
2538 /* Try expanding via a sincos optab; fall back to emitting a libcall
2539 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2540 is only generated when at least one of them is available. */
2541 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2542 {
2543 op1 = gen_reg_rtx (mode);
2544 op2 = gen_reg_rtx (mode);
2545
2546 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2547
2548 /* Compute into op1 and op2. */
2549 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2550 }
2551 else if (TARGET_HAS_SINCOS)
2552 {
2553 tree call, fn = NULL_TREE;
2554 tree top1, top2;
2555 rtx op1a, op2a;
2556
2557 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2558 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2559 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2560 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2561 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2562 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2563 else
2564 gcc_unreachable ();
2565
2566 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2567 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2568 op1a = copy_addr_to_reg (XEXP (op1, 0));
2569 op2a = copy_addr_to_reg (XEXP (op2, 0));
2570 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2571 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2572
2573 /* Make sure not to fold the sincos call again. */
2574 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2575 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2576 call, 3, arg, top1, top2));
2577 }
2578 else
2579 {
2580 tree call, fn = NULL_TREE, narg;
2581 tree ctype = build_complex_type (type);
2582
2583 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2584 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2585 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2586 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2587 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2588 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2589 else
2590 gcc_unreachable ();
2591
2592 /* If we don't have a decl for cexp, create one. This is the
2593 friendliest fallback if the user calls __builtin_cexpi
2594 without full target C99 function support. */
2595 if (fn == NULL_TREE)
2596 {
2597 tree fntype;
2598 const char *name = NULL;
2599
2600 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2601 name = "cexpf";
2602 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2603 name = "cexp";
2604 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2605 name = "cexpl";
2606
2607 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2608 fn = build_fn_decl (name, fntype);
2609 }
2610
2611 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2612 build_real (type, dconst0), arg);
2613
2614 /* Make sure not to fold the cexp call again. */
2615 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2616 return expand_expr (build_call_nary (ctype, call, 1, narg),
2617 target, VOIDmode, EXPAND_NORMAL);
2618 }
2619
2620 /* Now build the proper return type. */
2621 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2622 make_tree (TREE_TYPE (arg), op2),
2623 make_tree (TREE_TYPE (arg), op1)),
2624 target, VOIDmode, EXPAND_NORMAL);
2625 }
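/* That is, __builtin_cexpi (x) computes cos (x) + sin (x) * I, and is
   expanded via a sincos insn, a sincos libcall, or, failing both, a
   libcall to cexp on the complex argument x * I. */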
2626
2627 /* Conveniently construct a function call expression. FNDECL names the
2628 function to be called, N is the number of arguments, and the "..."
2629 parameters are the argument expressions. Unlike build_call_expr,
2630 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2631
2632 static tree
2633 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2634 {
2635 va_list ap;
2636 tree fntype = TREE_TYPE (fndecl);
2637 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2638
2639 va_start (ap, n);
2640 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2641 va_end (ap);
2642 SET_EXPR_LOCATION (fn, loc);
2643 return fn;
2644 }
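/* Typical use, as in the rounding fallbacks below:

     exp = build_call_nofold_loc (loc, fallback_fndecl, 1, arg);

   which yields a CALL_EXPR that can be expanded without being folded
   back into the builtin it is standing in for. */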
2645
2646 /* Expand a call to one of the builtin rounding functions gcc defines
2647 as an extension (lfloor and lceil). As these are gcc extensions we
2648 do not need to worry about setting errno to EDOM.
2649 If expanding via optab fails, lower expression to (int)(floor(x)).
2650 EXP is the expression that is a call to the builtin function;
2651 if convenient, the result should be placed in TARGET. */
2652
2653 static rtx
2654 expand_builtin_int_roundingfn (tree exp, rtx target)
2655 {
2656 convert_optab builtin_optab;
2657 rtx op0, insns, tmp;
2658 tree fndecl = get_callee_fndecl (exp);
2659 enum built_in_function fallback_fn;
2660 tree fallback_fndecl;
2661 enum machine_mode mode;
2662 tree arg;
2663
2664 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2665 gcc_unreachable ();
2666
2667 arg = CALL_EXPR_ARG (exp, 0);
2668
2669 switch (DECL_FUNCTION_CODE (fndecl))
2670 {
2671 CASE_FLT_FN (BUILT_IN_ICEIL):
2672 CASE_FLT_FN (BUILT_IN_LCEIL):
2673 CASE_FLT_FN (BUILT_IN_LLCEIL):
2674 builtin_optab = lceil_optab;
2675 fallback_fn = BUILT_IN_CEIL;
2676 break;
2677
2678 CASE_FLT_FN (BUILT_IN_IFLOOR):
2679 CASE_FLT_FN (BUILT_IN_LFLOOR):
2680 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2681 builtin_optab = lfloor_optab;
2682 fallback_fn = BUILT_IN_FLOOR;
2683 break;
2684
2685 default:
2686 gcc_unreachable ();
2687 }
2688
2689 /* Make a suitable register to place result in. */
2690 mode = TYPE_MODE (TREE_TYPE (exp));
2691
2692 target = gen_reg_rtx (mode);
2693
2694 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2695 need to expand the argument again. This way, we will not perform
2696 side-effects more than once. */
2697 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2698
2699 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2700
2701 start_sequence ();
2702
2703 /* Compute into TARGET. */
2704 if (expand_sfix_optab (target, op0, builtin_optab))
2705 {
2706 /* Output the entire sequence. */
2707 insns = get_insns ();
2708 end_sequence ();
2709 emit_insn (insns);
2710 return target;
2711 }
2712
2713 /* If we were unable to expand via the builtin, stop the sequence
2714 (without outputting the insns). */
2715 end_sequence ();
2716
2717 /* Fall back to floating point rounding optab. */
2718 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2719
2720 /* For non-C99 targets we may end up without a fallback fndecl here
2721 if the user called __builtin_lfloor directly. In this case emit
2722 a call to the floor/ceil variants nevertheless. This should result
2723 in the best user experience for targets lacking full C99 support. */
2724 if (fallback_fndecl == NULL_TREE)
2725 {
2726 tree fntype;
2727 const char *name = NULL;
2728
2729 switch (DECL_FUNCTION_CODE (fndecl))
2730 {
2731 case BUILT_IN_ICEIL:
2732 case BUILT_IN_LCEIL:
2733 case BUILT_IN_LLCEIL:
2734 name = "ceil";
2735 break;
2736 case BUILT_IN_ICEILF:
2737 case BUILT_IN_LCEILF:
2738 case BUILT_IN_LLCEILF:
2739 name = "ceilf";
2740 break;
2741 case BUILT_IN_ICEILL:
2742 case BUILT_IN_LCEILL:
2743 case BUILT_IN_LLCEILL:
2744 name = "ceill";
2745 break;
2746 case BUILT_IN_IFLOOR:
2747 case BUILT_IN_LFLOOR:
2748 case BUILT_IN_LLFLOOR:
2749 name = "floor";
2750 break;
2751 case BUILT_IN_IFLOORF:
2752 case BUILT_IN_LFLOORF:
2753 case BUILT_IN_LLFLOORF:
2754 name = "floorf";
2755 break;
2756 case BUILT_IN_IFLOORL:
2757 case BUILT_IN_LFLOORL:
2758 case BUILT_IN_LLFLOORL:
2759 name = "floorl";
2760 break;
2761 default:
2762 gcc_unreachable ();
2763 }
2764
2765 fntype = build_function_type_list (TREE_TYPE (arg),
2766 TREE_TYPE (arg), NULL_TREE);
2767 fallback_fndecl = build_fn_decl (name, fntype);
2768 }
2769
2770 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2771
2772 tmp = expand_normal (exp);
2773 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2774
2775 /* Truncate the result of floating point optab to integer
2776 via expand_fix (). */
2777 target = gen_reg_rtx (mode);
2778 expand_fix (target, tmp, 0);
2779
2780 return target;
2781 }
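/* For example, when no lfloor insn is available,

     long l = __builtin_lfloor (d);

   is expanded, roughly, as l = (long) floor (d): a call to floor
   followed by expand_fix on the result. */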
2782
2783 /* Expand a call to one of the builtin math functions doing integer
2784 conversion (lrint).
2785 Return 0 if a normal call should be emitted rather than expanding the
2786 function in-line. EXP is the expression that is a call to the builtin
2787 function; if convenient, the result should be placed in TARGET. */
2788
2789 static rtx
2790 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2791 {
2792 convert_optab builtin_optab;
2793 rtx op0, insns;
2794 tree fndecl = get_callee_fndecl (exp);
2795 tree arg;
2796 enum machine_mode mode;
2797 enum built_in_function fallback_fn = BUILT_IN_NONE;
2798
2799 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2800 gcc_unreachable ();
2801
2802 arg = CALL_EXPR_ARG (exp, 0);
2803
2804 switch (DECL_FUNCTION_CODE (fndecl))
2805 {
2806 CASE_FLT_FN (BUILT_IN_IRINT):
2807 fallback_fn = BUILT_IN_LRINT;
2808 /* FALLTHRU */
2809 CASE_FLT_FN (BUILT_IN_LRINT):
2810 CASE_FLT_FN (BUILT_IN_LLRINT):
2811 builtin_optab = lrint_optab;
2812 break;
2813
2814 CASE_FLT_FN (BUILT_IN_IROUND):
2815 fallback_fn = BUILT_IN_LROUND;
2816 /* FALLTHRU */
2817 CASE_FLT_FN (BUILT_IN_LROUND):
2818 CASE_FLT_FN (BUILT_IN_LLROUND):
2819 builtin_optab = lround_optab;
2820 break;
2821
2822 default:
2823 gcc_unreachable ();
2824 }
2825
2826 /* There's no easy way to detect the case we need to set EDOM. */
2827 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2828 return NULL_RTX;
2829
2830 /* Make a suitable register to place result in. */
2831 mode = TYPE_MODE (TREE_TYPE (exp));
2832
2833 /* If errno does not need to be maintained, try expanding via the optab. */
2834 if (!flag_errno_math)
2835 {
2836 rtx result = gen_reg_rtx (mode);
2837
2838 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2839 need to expand the argument again. This way, we will not perform
2840 side-effects more than once. */
2841 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2842
2843 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2844
2845 start_sequence ();
2846
2847 if (expand_sfix_optab (result, op0, builtin_optab))
2848 {
2849 /* Output the entire sequence. */
2850 insns = get_insns ();
2851 end_sequence ();
2852 emit_insn (insns);
2853 return result;
2854 }
2855
2856 /* If we were unable to expand via the builtin, stop the sequence
2857 (without outputting the insns) and call the library function
2858 with the stabilized argument list. */
2859 end_sequence ();
2860 }
2861
2862 if (fallback_fn != BUILT_IN_NONE)
2863 {
2864 /* Fall back to rounding to long int. Use implicit_p 0: for non-C99
2865 targets, (int) round (x) should never be transformed into
2866 BUILT_IN_IROUND, and if __builtin_iround is called directly, emit
2867 a call to lround in the hope that the target provides at least some
2868 C99 functions. This should result in the best user experience for
2869 targets lacking full C99 support. */
2870 tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
2871 fallback_fn, 0);
2872
2873 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2874 fallback_fndecl, 1, arg);
2875
2876 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2877 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2878 return convert_to_mode (mode, target, 0);
2879 }
2880
2881 return expand_call (exp, target, target == const0_rtx);
2882 }
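/* Likewise, when no lround insn is available, __builtin_iround (f)
   falls back to, roughly, (int) lround (f), using the explicit lround
   declaration obtained above. */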
2883
2884 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2885 a normal call should be emitted rather than expanding the function
2886 in-line. EXP is the expression that is a call to the builtin
2887 function; if convenient, the result should be placed in TARGET. */
2888
2889 static rtx
2890 expand_builtin_powi (tree exp, rtx target)
2891 {
2892 tree arg0, arg1;
2893 rtx op0, op1;
2894 enum machine_mode mode;
2895 enum machine_mode mode2;
2896
2897 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2898 return NULL_RTX;
2899
2900 arg0 = CALL_EXPR_ARG (exp, 0);
2901 arg1 = CALL_EXPR_ARG (exp, 1);
2902 mode = TYPE_MODE (TREE_TYPE (exp));
2903
2904 /* Emit a libcall to libgcc. */
2905
2906 /* Mode of the 2nd argument must match that of an int. */
2907 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2908
2909 if (target == NULL_RTX)
2910 target = gen_reg_rtx (mode);
2911
2912 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2913 if (GET_MODE (op0) != mode)
2914 op0 = convert_to_mode (mode, op0, 0);
2915 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2916 if (GET_MODE (op1) != mode2)
2917 op1 = convert_to_mode (mode2, op1, 0);
2918
2919 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2920 target, LCT_CONST, mode, 2,
2921 op0, mode, op1, mode2);
2922
2923 return target;
2924 }
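/* A sketch of the resulting libcall for DFmode, assuming libgcc's
   usual libfunc name for powi_optab:

     double r = __powidf2 (x, n);

   The actual callee is whatever optab_libfunc returns for the mode. */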
2925
2926 /* Expand expression EXP which is a call to the strlen builtin. Return
2927 NULL_RTX if we failed; the caller should emit a normal call, otherwise
2928 try to get the result in TARGET, if convenient. */
2929
2930 static rtx
2931 expand_builtin_strlen (tree exp, rtx target,
2932 enum machine_mode target_mode)
2933 {
2934 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2935 return NULL_RTX;
2936 else
2937 {
2938 struct expand_operand ops[4];
2939 rtx pat;
2940 tree len;
2941 tree src = CALL_EXPR_ARG (exp, 0);
2942 rtx src_reg, before_strlen;
2943 enum machine_mode insn_mode = target_mode;
2944 enum insn_code icode = CODE_FOR_nothing;
2945 unsigned int align;
2946
2947 /* If the length can be computed at compile-time, return it. */
2948 len = c_strlen (src, 0);
2949 if (len)
2950 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2951
2952 /* If the length can be computed at compile-time and is a constant
2953 integer, but there are side-effects in src, evaluate
2954 src for side-effects, then return len.
2955 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2956 can be optimized into: i++; x = 3; */
2957 len = c_strlen (src, 1);
2958 if (len && TREE_CODE (len) == INTEGER_CST)
2959 {
2960 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2961 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2962 }
2963
2964 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2965
2966 /* If SRC is not a pointer type, don't do this operation inline. */
2967 if (align == 0)
2968 return NULL_RTX;
2969
2970 /* Bail out if we can't compute strlen in the right mode. */
2971 while (insn_mode != VOIDmode)
2972 {
2973 icode = optab_handler (strlen_optab, insn_mode);
2974 if (icode != CODE_FOR_nothing)
2975 break;
2976
2977 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
2978 }
2979 if (insn_mode == VOIDmode)
2980 return NULL_RTX;
2981
2982 /* Make a place to hold the source address. We will not expand
2983 the actual source until we are sure that the expansion will
2984 not fail -- there are trees that cannot be expanded twice. */
2985 src_reg = gen_reg_rtx (Pmode);
2986
2987 /* Mark the beginning of the strlen sequence so we can emit the
2988 source operand later. */
2989 before_strlen = get_last_insn ();
2990
2991 create_output_operand (&ops[0], target, insn_mode);
2992 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
2993 create_integer_operand (&ops[2], 0);
2994 create_integer_operand (&ops[3], align);
2995 if (!maybe_expand_insn (icode, 4, ops))
2996 return NULL_RTX;
2997
2998 /* Now that we are assured of success, expand the source. */
2999 start_sequence ();
3000 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3001 if (pat != src_reg)
3002 {
3003 #ifdef POINTERS_EXTEND_UNSIGNED
3004 if (GET_MODE (pat) != Pmode)
3005 pat = convert_to_mode (Pmode, pat,
3006 POINTERS_EXTEND_UNSIGNED);
3007 #endif
3008 emit_move_insn (src_reg, pat);
3009 }
3010 pat = get_insns ();
3011 end_sequence ();
3012
3013 if (before_strlen)
3014 emit_insn_after (pat, before_strlen);
3015 else
3016 emit_insn_before (pat, get_insns ());
3017
3018 /* Return the value in the proper mode for this function. */
3019 if (GET_MODE (ops[0].value) == target_mode)
3020 target = ops[0].value;
3021 else if (target != 0)
3022 convert_move (target, ops[0].value, 0);
3023 else
3024 target = convert_to_mode (target_mode, ops[0].value, 0);
3025
3026 return target;
3027 }
3028 }
3029
3030 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3031 bytes from constant string DATA + OFFSET and return it as target
3032 constant. */
3033
3034 static rtx
3035 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3036 enum machine_mode mode)
3037 {
3038 const char *str = (const char *) data;
3039
3040 gcc_assert (offset >= 0
3041 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3042 <= strlen (str) + 1));
3043
3044 return c_readstr (str + offset, mode);
3045 }
3046
3047 /* Expand a call EXP to the memcpy builtin.
3048 Return NULL_RTX if we failed; the caller should emit a normal call,
3049 otherwise try to get the result in TARGET, if convenient (and in
3050 mode MODE if that's convenient). */
3051
3052 static rtx
3053 expand_builtin_memcpy (tree exp, rtx target)
3054 {
3055 if (!validate_arglist (exp,
3056 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3057 return NULL_RTX;
3058 else
3059 {
3060 tree dest = CALL_EXPR_ARG (exp, 0);
3061 tree src = CALL_EXPR_ARG (exp, 1);
3062 tree len = CALL_EXPR_ARG (exp, 2);
3063 const char *src_str;
3064 unsigned int src_align = get_pointer_alignment (src);
3065 unsigned int dest_align = get_pointer_alignment (dest);
3066 rtx dest_mem, src_mem, dest_addr, len_rtx;
3067 HOST_WIDE_INT expected_size = -1;
3068 unsigned int expected_align = 0;
3069
3070 /* If DEST is not a pointer type, call the normal function. */
3071 if (dest_align == 0)
3072 return NULL_RTX;
3073
3074 /* If SRC is not a pointer type, don't do this
3075 operation in-line. */
3076 if (src_align == 0)
3077 return NULL_RTX;
3078
3079 if (currently_expanding_gimple_stmt)
3080 stringop_block_profile (currently_expanding_gimple_stmt,
3081 &expected_align, &expected_size);
3082
3083 if (expected_align < dest_align)
3084 expected_align = dest_align;
3085 dest_mem = get_memory_rtx (dest, len);
3086 set_mem_align (dest_mem, dest_align);
3087 len_rtx = expand_normal (len);
3088 src_str = c_getstr (src);
3089
3090 /* If SRC is a string constant and block move would be done
3091 by pieces, we can avoid loading the string from memory
3092 and only store the computed constants. */
3093 if (src_str
3094 && CONST_INT_P (len_rtx)
3095 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3096 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3097 CONST_CAST (char *, src_str),
3098 dest_align, false))
3099 {
3100 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3101 builtin_memcpy_read_str,
3102 CONST_CAST (char *, src_str),
3103 dest_align, false, 0);
3104 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3105 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3106 return dest_mem;
3107 }
3108
3109 src_mem = get_memory_rtx (src, len);
3110 set_mem_align (src_mem, src_align);
3111
3112 /* Copy the block most expediently. */
3113 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3114 CALL_EXPR_TAILCALL (exp)
3115 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3116 expected_align, expected_size);
3117
3118 if (dest_addr == 0)
3119 {
3120 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3121 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3122 }
3123 return dest_addr;
3124 }
3125 }
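/* E.g. memcpy (buf, "abcd", 5) with a constant length and sufficient
   alignment is expanded by store_by_pieces into immediate stores of
   the string's bytes, never loading "abcd" from memory. */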
3126
3127 /* Expand a call EXP to the mempcpy builtin.
3128 Return NULL_RTX if we failed; the caller should emit a normal call,
3129 otherwise try to get the result in TARGET, if convenient (and in
3130 mode MODE if that's convenient). If ENDP is 0 return the
3131 destination pointer, if ENDP is 1 return the end pointer ala
3132 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3133 stpcpy. */
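/* E.g. copying the four bytes of "abc" to DEST yields DEST when
   ENDP == 0, DEST + 4 when ENDP == 1 (mempcpy) and DEST + 3, the
   address of the copied NUL, when ENDP == 2 (stpcpy). */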
3134
3135 static rtx
3136 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3137 {
3138 if (!validate_arglist (exp,
3139 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3140 return NULL_RTX;
3141 else
3142 {
3143 tree dest = CALL_EXPR_ARG (exp, 0);
3144 tree src = CALL_EXPR_ARG (exp, 1);
3145 tree len = CALL_EXPR_ARG (exp, 2);
3146 return expand_builtin_mempcpy_args (dest, src, len,
3147 target, mode, /*endp=*/ 1);
3148 }
3149 }
3150
3151 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3152 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3153 so that this can also be called without constructing an actual CALL_EXPR.
3154 The other arguments and return value are the same as for
3155 expand_builtin_mempcpy. */
3156
3157 static rtx
3158 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3159 rtx target, enum machine_mode mode, int endp)
3160 {
3161 /* If return value is ignored, transform mempcpy into memcpy. */
3162 if (target == const0_rtx && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3163 {
3164 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3165 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3166 dest, src, len);
3167 return expand_expr (result, target, mode, EXPAND_NORMAL);
3168 }
3169 else
3170 {
3171 const char *src_str;
3172 unsigned int src_align = get_pointer_alignment (src);
3173 unsigned int dest_align = get_pointer_alignment (dest);
3174 rtx dest_mem, src_mem, len_rtx;
3175
3176 /* If either SRC or DEST is not a pointer type, don't do this
3177 operation in-line. */
3178 if (dest_align == 0 || src_align == 0)
3179 return NULL_RTX;
3180
3181 /* If LEN is not constant, call the normal function. */
3182 if (! host_integerp (len, 1))
3183 return NULL_RTX;
3184
3185 len_rtx = expand_normal (len);
3186 src_str = c_getstr (src);
3187
3188 /* If SRC is a string constant and block move would be done
3189 by pieces, we can avoid loading the string from memory
3190 and only store the computed constants. */
3191 if (src_str
3192 && CONST_INT_P (len_rtx)
3193 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3194 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3195 CONST_CAST (char *, src_str),
3196 dest_align, false))
3197 {
3198 dest_mem = get_memory_rtx (dest, len);
3199 set_mem_align (dest_mem, dest_align);
3200 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3201 builtin_memcpy_read_str,
3202 CONST_CAST (char *, src_str),
3203 dest_align, false, endp);
3204 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3205 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3206 return dest_mem;
3207 }
3208
3209 if (CONST_INT_P (len_rtx)
3210 && can_move_by_pieces (INTVAL (len_rtx),
3211 MIN (dest_align, src_align)))
3212 {
3213 dest_mem = get_memory_rtx (dest, len);
3214 set_mem_align (dest_mem, dest_align);
3215 src_mem = get_memory_rtx (src, len);
3216 set_mem_align (src_mem, src_align);
3217 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3218 MIN (dest_align, src_align), endp);
3219 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3220 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3221 return dest_mem;
3222 }
3223
3224 return NULL_RTX;
3225 }
3226 }
3227
3228 #ifndef HAVE_movstr
3229 # define HAVE_movstr 0
3230 # define CODE_FOR_movstr CODE_FOR_nothing
3231 #endif
3232
3233 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3234 we failed, the caller should emit a normal call, otherwise try to
3235 get the result in TARGET, if convenient. If ENDP is 0 return the
3236 destination pointer, if ENDP is 1 return the end pointer ala
3237 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3238 stpcpy. */
3239
3240 static rtx
3241 expand_movstr (tree dest, tree src, rtx target, int endp)
3242 {
3243 struct expand_operand ops[3];
3244 rtx dest_mem;
3245 rtx src_mem;
3246
3247 if (!HAVE_movstr)
3248 return NULL_RTX;
3249
3250 dest_mem = get_memory_rtx (dest, NULL);
3251 src_mem = get_memory_rtx (src, NULL);
3252 if (!endp)
3253 {
3254 target = force_reg (Pmode, XEXP (dest_mem, 0));
3255 dest_mem = replace_equiv_address (dest_mem, target);
3256 }
3257
3258 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3259 create_fixed_operand (&ops[1], dest_mem);
3260 create_fixed_operand (&ops[2], src_mem);
3261 expand_insn (CODE_FOR_movstr, 3, ops);
3262
3263 if (endp && target != const0_rtx)
3264 {
3265 target = ops[0].value;
3266 /* movstr is supposed to set end to the address of the NUL
3267 terminator. If the caller requested a mempcpy-like return value,
3268 adjust it. */
3269 if (endp == 1)
3270 {
3271 rtx tem = plus_constant (GET_MODE (target),
3272 gen_lowpart (GET_MODE (target), target), 1);
3273 emit_move_insn (target, force_operand (tem, NULL_RTX));
3274 }
3275 }
3276 return target;
3277 }
3278
3279 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3280 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3281 try to get the result in TARGET, if convenient (and in mode MODE if that's
3282 convenient). */
3283
3284 static rtx
3285 expand_builtin_strcpy (tree exp, rtx target)
3286 {
3287 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3288 {
3289 tree dest = CALL_EXPR_ARG (exp, 0);
3290 tree src = CALL_EXPR_ARG (exp, 1);
3291 return expand_builtin_strcpy_args (dest, src, target);
3292 }
3293 return NULL_RTX;
3294 }
3295
3296 /* Helper function to do the actual work for expand_builtin_strcpy. The
3297 arguments to the builtin_strcpy call DEST and SRC are broken out
3298 so that this can also be called without constructing an actual CALL_EXPR.
3299 The other arguments and return value are the same as for
3300 expand_builtin_strcpy. */
3301
3302 static rtx
3303 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3304 {
3305 return expand_movstr (dest, src, target, /*endp=*/0);
3306 }
3307
3308 /* Expand a call EXP to the stpcpy builtin.
3309 Return NULL_RTX if we failed, in which case the caller should emit a
3310 normal call; otherwise try to get the result in TARGET, if convenient
3311 (and in mode MODE if that's convenient). */
3312
3313 static rtx
3314 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3315 {
3316 tree dst, src;
3317 location_t loc = EXPR_LOCATION (exp);
3318
3319 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3320 return NULL_RTX;
3321
3322 dst = CALL_EXPR_ARG (exp, 0);
3323 src = CALL_EXPR_ARG (exp, 1);
3324
3325 /* If return value is ignored, transform stpcpy into strcpy. */
3326 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3327 {
3328 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3329 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3330 return expand_expr (result, target, mode, EXPAND_NORMAL);
3331 }
3332 else
3333 {
3334 tree len, lenp1;
3335 rtx ret;
3336
3337 /* Ensure we get an actual string whose length can be evaluated at
3338 compile-time, not an expression containing a string. This is
3339 because the latter will potentially produce pessimized code
3340 when used to produce the return value. */
3341 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3342 return expand_movstr (dst, src, target, /*endp=*/2);
3343
3344 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3345 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3346 target, mode, /*endp=*/2);
3347
3348 if (ret)
3349 return ret;
3350
3351 if (TREE_CODE (len) == INTEGER_CST)
3352 {
3353 rtx len_rtx = expand_normal (len);
3354
3355 if (CONST_INT_P (len_rtx))
3356 {
3357 ret = expand_builtin_strcpy_args (dst, src, target);
3358
3359 if (ret)
3360 {
3361 if (! target)
3362 {
3363 if (mode != VOIDmode)
3364 target = gen_reg_rtx (mode);
3365 else
3366 target = gen_reg_rtx (GET_MODE (ret));
3367 }
3368 if (GET_MODE (target) != GET_MODE (ret))
3369 ret = gen_lowpart (GET_MODE (target), ret);
3370
3371 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3372 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3373 gcc_assert (ret);
3374
3375 return target;
3376 }
3377 }
3378 }
3379
3380 return expand_movstr (dst, src, target, /*endp=*/2);
3381 }
3382 }
3383
3384 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3385 bytes from constant string DATA + OFFSET and return it as target
3386 constant. */
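/* For instance, with DATA == "hi" (a hypothetical constant source),
   offsets 0 through 2 read from the string itself, while any OFFSET
   past the terminating NUL returns const0_rtx; this is how the
   trailing zero padding required by strncpy comes out for free.  */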
3387
3388 rtx
3389 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3390 enum machine_mode mode)
3391 {
3392 const char *str = (const char *) data;
3393
3394 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3395 return const0_rtx;
3396
3397 return c_readstr (str + offset, mode);
3398 }
3399
3400 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3401 NULL_RTX if we failed, in which case the caller should emit a normal call. */
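/* For a hypothetical call such as __builtin_strncpy (buf, "hi", 8), the
   source length including the NUL is 3 < 8, so the code below stores the
   two characters followed by six zero bytes, matching strncpy's required
   semantics.  */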
3402
3403 static rtx
3404 expand_builtin_strncpy (tree exp, rtx target)
3405 {
3406 location_t loc = EXPR_LOCATION (exp);
3407
3408 if (validate_arglist (exp,
3409 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3410 {
3411 tree dest = CALL_EXPR_ARG (exp, 0);
3412 tree src = CALL_EXPR_ARG (exp, 1);
3413 tree len = CALL_EXPR_ARG (exp, 2);
3414 tree slen = c_strlen (src, 1);
3415
3416 /* We must be passed a constant LEN, and SRC must have a constant length. */
3417 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3418 return NULL_RTX;
3419
3420 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3421
3422 /* We're required to pad with trailing zeros if the requested
3423 len is greater than strlen(s2)+1. In that case try to
3424 use store_by_pieces; if it fails, punt. */
3425 if (tree_int_cst_lt (slen, len))
3426 {
3427 unsigned int dest_align = get_pointer_alignment (dest);
3428 const char *p = c_getstr (src);
3429 rtx dest_mem;
3430
3431 if (!p || dest_align == 0 || !host_integerp (len, 1)
3432 || !can_store_by_pieces (tree_low_cst (len, 1),
3433 builtin_strncpy_read_str,
3434 CONST_CAST (char *, p),
3435 dest_align, false))
3436 return NULL_RTX;
3437
3438 dest_mem = get_memory_rtx (dest, len);
3439 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3440 builtin_strncpy_read_str,
3441 CONST_CAST (char *, p), dest_align, false, 0);
3442 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3443 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3444 return dest_mem;
3445 }
3446 }
3447 return NULL_RTX;
3448 }
3449
3450 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3451 bytes from constant string DATA + OFFSET and return it as target
3452 constant. */
3453
3454 rtx
3455 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3456 enum machine_mode mode)
3457 {
3458 const char *c = (const char *) data;
3459 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3460
3461 memset (p, *c, GET_MODE_SIZE (mode));
3462
3463 return c_readstr (p, mode);
3464 }
3465
3466 /* Callback routine for store_by_pieces. Return the RTL of a register
3467 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3468 char value given in the RTL register data. For example, if mode is
3469 4 bytes wide, return the RTL for 0x01010101*data. */
3470
3471 static rtx
3472 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3473 enum machine_mode mode)
3474 {
3475 rtx target, coeff;
3476 size_t size;
3477 char *p;
3478
3479 size = GET_MODE_SIZE (mode);
3480 if (size == 1)
3481 return (rtx) data;
3482
3483 p = XALLOCAVEC (char, size);
3484 memset (p, 1, size);
3485 coeff = c_readstr (p, mode);
3486
3487 target = convert_to_mode (mode, (rtx) data, 1);
3488 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3489 return force_reg (mode, target);
3490 }
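/* A minimal host-side sketch of the replication trick used above (purely
   illustrative, not compiler code): multiplying the byte by 0x0101...01
   copies it into every byte of the mode.

     unsigned int
     replicate4 (unsigned char c)
     {
       return c * 0x01010101u;    e.g. 0xAB yields 0xABABABAB
     }
*/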
3491
3492 /* Expand expression EXP, which is a call to the memset builtin. Return
3493 NULL_RTX if we failed, in which case the caller should emit a normal
3494 call; otherwise try to get the result in TARGET, if convenient (and in
3495 mode MODE if that's convenient). */
3496
3497 static rtx
3498 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3499 {
3500 if (!validate_arglist (exp,
3501 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3502 return NULL_RTX;
3503 else
3504 {
3505 tree dest = CALL_EXPR_ARG (exp, 0);
3506 tree val = CALL_EXPR_ARG (exp, 1);
3507 tree len = CALL_EXPR_ARG (exp, 2);
3508 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3509 }
3510 }
3511
3512 /* Helper function to do the actual work for expand_builtin_memset. The
3513 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3514 so that this can also be called without constructing an actual CALL_EXPR.
3515 The other arguments and return value are the same as for
3516 expand_builtin_memset. */
3517
3518 static rtx
3519 expand_builtin_memset_args (tree dest, tree val, tree len,
3520 rtx target, enum machine_mode mode, tree orig_exp)
3521 {
3522 tree fndecl, fn;
3523 enum built_in_function fcode;
3524 enum machine_mode val_mode;
3525 char c;
3526 unsigned int dest_align;
3527 rtx dest_mem, dest_addr, len_rtx;
3528 HOST_WIDE_INT expected_size = -1;
3529 unsigned int expected_align = 0;
3530
3531 dest_align = get_pointer_alignment (dest);
3532
3533 /* If DEST is not a pointer type, don't do this operation in-line. */
3534 if (dest_align == 0)
3535 return NULL_RTX;
3536
3537 if (currently_expanding_gimple_stmt)
3538 stringop_block_profile (currently_expanding_gimple_stmt,
3539 &expected_align, &expected_size);
3540
3541 if (expected_align < dest_align)
3542 expected_align = dest_align;
3543
3544 /* If the LEN parameter is zero, return DEST. */
3545 if (integer_zerop (len))
3546 {
3547 /* Evaluate and ignore VAL in case it has side-effects. */
3548 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3549 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3550 }
3551
3552 /* Stabilize the arguments in case we fail. */
3553 dest = builtin_save_expr (dest);
3554 val = builtin_save_expr (val);
3555 len = builtin_save_expr (len);
3556
3557 len_rtx = expand_normal (len);
3558 dest_mem = get_memory_rtx (dest, len);
3559 val_mode = TYPE_MODE (unsigned_char_type_node);
3560
3561 if (TREE_CODE (val) != INTEGER_CST)
3562 {
3563 rtx val_rtx;
3564
3565 val_rtx = expand_normal (val);
3566 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3567
3568 /* Assume that we can memset by pieces if we can store
3569 the coefficients by pieces (in the required modes).
3570 We can't pass builtin_memset_gen_str as that emits RTL. */
3571 c = 1;
3572 if (host_integerp (len, 1)
3573 && can_store_by_pieces (tree_low_cst (len, 1),
3574 builtin_memset_read_str, &c, dest_align,
3575 true))
3576 {
3577 val_rtx = force_reg (val_mode, val_rtx);
3578 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3579 builtin_memset_gen_str, val_rtx, dest_align,
3580 true, 0);
3581 }
3582 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3583 dest_align, expected_align,
3584 expected_size))
3585 goto do_libcall;
3586
3587 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3588 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3589 return dest_mem;
3590 }
3591
3592 if (target_char_cast (val, &c))
3593 goto do_libcall;
3594
3595 if (c)
3596 {
3597 if (host_integerp (len, 1)
3598 && can_store_by_pieces (tree_low_cst (len, 1),
3599 builtin_memset_read_str, &c, dest_align,
3600 true))
3601 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3602 builtin_memset_read_str, &c, dest_align, true, 0);
3603 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3604 gen_int_mode (c, val_mode),
3605 dest_align, expected_align,
3606 expected_size))
3607 goto do_libcall;
3608
3609 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3610 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3611 return dest_mem;
3612 }
3613
3614 set_mem_align (dest_mem, dest_align);
3615 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3616 CALL_EXPR_TAILCALL (orig_exp)
3617 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3618 expected_align, expected_size);
3619
3620 if (dest_addr == 0)
3621 {
3622 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3623 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3624 }
3625
3626 return dest_addr;
3627
3628 do_libcall:
3629 fndecl = get_callee_fndecl (orig_exp);
3630 fcode = DECL_FUNCTION_CODE (fndecl);
3631 if (fcode == BUILT_IN_MEMSET)
3632 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3633 dest, val, len);
3634 else if (fcode == BUILT_IN_BZERO)
3635 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3636 dest, len);
3637 else
3638 gcc_unreachable ();
3639 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3640 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3641 return expand_call (fn, target, target == const0_rtx);
3642 }
3643
3644 /* Expand expression EXP, which is a call to the bzero builtin. Return
3645 NULL_RTX if we failed, in which case the caller should emit a normal call. */
3646
3647 static rtx
3648 expand_builtin_bzero (tree exp)
3649 {
3650 tree dest, size;
3651 location_t loc = EXPR_LOCATION (exp);
3652
3653 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3654 return NULL_RTX;
3655
3656 dest = CALL_EXPR_ARG (exp, 0);
3657 size = CALL_EXPR_ARG (exp, 1);
3658
3659 /* New argument list transforming bzero(ptr x, int y) to
3660 memset(ptr x, int 0, size_t y). This is done this way
3661 so that if it isn't expanded inline, we fall back to
3662 calling bzero instead of memset. */
3663
3664 return expand_builtin_memset_args (dest, integer_zero_node,
3665 fold_convert_loc (loc,
3666 size_type_node, size),
3667 const0_rtx, VOIDmode, exp);
3668 }
3669
3670 /* Expand expression EXP, which is a call to the memcmp built-in function.
3671 Return NULL_RTX if we failed and the caller should emit a normal call,
3672 otherwise try to get the result in TARGET, if convenient (and in mode
3673 MODE, if that's convenient). */
3674
3675 static rtx
3676 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3677 ATTRIBUTE_UNUSED enum machine_mode mode)
3678 {
3679 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3680
3681 if (!validate_arglist (exp,
3682 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3683 return NULL_RTX;
3684
3685 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3686 implementing memcmp because it will stop if it encounters two
3687 zero bytes. */
3688 #if defined HAVE_cmpmemsi
3689 {
3690 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3691 rtx result;
3692 rtx insn;
3693 tree arg1 = CALL_EXPR_ARG (exp, 0);
3694 tree arg2 = CALL_EXPR_ARG (exp, 1);
3695 tree len = CALL_EXPR_ARG (exp, 2);
3696
3697 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3698 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3699 enum machine_mode insn_mode;
3700
3701 if (HAVE_cmpmemsi)
3702 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3703 else
3704 return NULL_RTX;
3705
3706 /* If the alignment of either argument is unknown, call the function. */
3707 if (arg1_align == 0 || arg2_align == 0)
3708 return NULL_RTX;
3709
3710 /* Make a place to write the result of the instruction. */
3711 result = target;
3712 if (! (result != 0
3713 && REG_P (result) && GET_MODE (result) == insn_mode
3714 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3715 result = gen_reg_rtx (insn_mode);
3716
3717 arg1_rtx = get_memory_rtx (arg1, len);
3718 arg2_rtx = get_memory_rtx (arg2, len);
3719 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3720
3721 /* Set MEM_SIZE as appropriate. */
3722 if (CONST_INT_P (arg3_rtx))
3723 {
3724 set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
3725 set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
3726 }
3727
3728 if (HAVE_cmpmemsi)
3729 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3730 GEN_INT (MIN (arg1_align, arg2_align)));
3731 else
3732 gcc_unreachable ();
3733
3734 if (insn)
3735 emit_insn (insn);
3736 else
3737 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
3738 TYPE_MODE (integer_type_node), 3,
3739 XEXP (arg1_rtx, 0), Pmode,
3740 XEXP (arg2_rtx, 0), Pmode,
3741 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
3742 TYPE_UNSIGNED (sizetype)),
3743 TYPE_MODE (sizetype));
3744
3745 /* Return the value in the proper mode for this function. */
3746 mode = TYPE_MODE (TREE_TYPE (exp));
3747 if (GET_MODE (result) == mode)
3748 return result;
3749 else if (target != 0)
3750 {
3751 convert_move (target, result, 0);
3752 return target;
3753 }
3754 else
3755 return convert_to_mode (mode, result, 0);
3756 }
3757 #endif /* HAVE_cmpmemsi. */
3758
3759 return NULL_RTX;
3760 }
3761
3762 /* Expand expression EXP, which is a call to the strcmp builtin. Return
3763 NULL_RTX if we failed, in which case the caller should emit a normal
3764 call; otherwise try to get the result in TARGET, if convenient. */
3765
3766 static rtx
3767 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
3768 {
3769 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3770 return NULL_RTX;
3771
3772 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
3773 if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
3774 || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
3775 {
3776 rtx arg1_rtx, arg2_rtx;
3777 rtx result, insn = NULL_RTX;
3778 tree fndecl, fn;
3779 tree arg1 = CALL_EXPR_ARG (exp, 0);
3780 tree arg2 = CALL_EXPR_ARG (exp, 1);
3781
3782 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3783 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3784
3785 /* If the alignment of either argument is unknown, call the function. */
3786 if (arg1_align == 0 || arg2_align == 0)
3787 return NULL_RTX;
3788
3789 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
3790 arg1 = builtin_save_expr (arg1);
3791 arg2 = builtin_save_expr (arg2);
3792
3793 arg1_rtx = get_memory_rtx (arg1, NULL);
3794 arg2_rtx = get_memory_rtx (arg2, NULL);
3795
3796 #ifdef HAVE_cmpstrsi
3797 /* Try to call cmpstrsi. */
3798 if (HAVE_cmpstrsi)
3799 {
3800 enum machine_mode insn_mode
3801 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
3802
3803 /* Make a place to write the result of the instruction. */
3804 result = target;
3805 if (! (result != 0
3806 && REG_P (result) && GET_MODE (result) == insn_mode
3807 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3808 result = gen_reg_rtx (insn_mode);
3809
3810 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
3811 GEN_INT (MIN (arg1_align, arg2_align)));
3812 }
3813 #endif
3814 #ifdef HAVE_cmpstrnsi
3815 /* Try to determine at least one length and call cmpstrnsi. */
3816 if (!insn && HAVE_cmpstrnsi)
3817 {
3818 tree len;
3819 rtx arg3_rtx;
3820
3821 enum machine_mode insn_mode
3822 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
3823 tree len1 = c_strlen (arg1, 1);
3824 tree len2 = c_strlen (arg2, 1);
3825
3826 if (len1)
3827 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3828 if (len2)
3829 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3830
3831 /* If we don't have a constant length for the first, use the length
3832 of the second, if we know it. We don't require a constant for
3833 this case; some cost analysis could be done if both are available
3834 but neither is constant. For now, assume they're equally cheap,
3835 unless one has side effects. If both strings have constant lengths,
3836 use the smaller. */
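/* For example, in the hypothetical call strcmp ("abc", s) only len1 is
   known; LEN becomes strlen ("abc") + 1 == 4, and comparing at most four
   bytes (through the NUL of "abc") is enough to decide the result.  */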
3837
3838 if (!len1)
3839 len = len2;
3840 else if (!len2)
3841 len = len1;
3842 else if (TREE_SIDE_EFFECTS (len1))
3843 len = len2;
3844 else if (TREE_SIDE_EFFECTS (len2))
3845 len = len1;
3846 else if (TREE_CODE (len1) != INTEGER_CST)
3847 len = len2;
3848 else if (TREE_CODE (len2) != INTEGER_CST)
3849 len = len1;
3850 else if (tree_int_cst_lt (len1, len2))
3851 len = len1;
3852 else
3853 len = len2;
3854
3855 /* If both arguments have side effects, we cannot optimize. */
3856 if (!len || TREE_SIDE_EFFECTS (len))
3857 goto do_libcall;
3858
3859 arg3_rtx = expand_normal (len);
3860
3861 /* Make a place to write the result of the instruction. */
3862 result = target;
3863 if (! (result != 0
3864 && REG_P (result) && GET_MODE (result) == insn_mode
3865 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3866 result = gen_reg_rtx (insn_mode);
3867
3868 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3869 GEN_INT (MIN (arg1_align, arg2_align)));
3870 }
3871 #endif
3872
3873 if (insn)
3874 {
3875 enum machine_mode mode;
3876 emit_insn (insn);
3877
3878 /* Return the value in the proper mode for this function. */
3879 mode = TYPE_MODE (TREE_TYPE (exp));
3880 if (GET_MODE (result) == mode)
3881 return result;
3882 if (target == 0)
3883 return convert_to_mode (mode, result, 0);
3884 convert_move (target, result, 0);
3885 return target;
3886 }
3887
3888 /* Expand the library call ourselves using a stabilized argument
3889 list to avoid re-evaluating the function's arguments twice. */
3890 #ifdef HAVE_cmpstrnsi
3891 do_libcall:
3892 #endif
3893 fndecl = get_callee_fndecl (exp);
3894 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
3895 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3896 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
3897 return expand_call (fn, target, target == const0_rtx);
3898 }
3899 #endif
3900 return NULL_RTX;
3901 }
3902
3903 /* Expand expression EXP, which is a call to the strncmp builtin. Return
3904 NULL_RTX if we failed, in which case the caller should emit a normal
3905 call; otherwise try to get the result in TARGET, if convenient. */
3906
3907 static rtx
3908 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3909 ATTRIBUTE_UNUSED enum machine_mode mode)
3910 {
3911 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3912
3913 if (!validate_arglist (exp,
3914 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3915 return NULL_RTX;
3916
3917 /* If c_strlen can determine an expression for one of the string
3918 lengths, and it doesn't have side effects, then emit cmpstrnsi
3919 using length MIN(strlen(string)+1, arg3). */
3920 #ifdef HAVE_cmpstrnsi
3921 if (HAVE_cmpstrnsi)
3922 {
3923 tree len, len1, len2;
3924 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3925 rtx result, insn;
3926 tree fndecl, fn;
3927 tree arg1 = CALL_EXPR_ARG (exp, 0);
3928 tree arg2 = CALL_EXPR_ARG (exp, 1);
3929 tree arg3 = CALL_EXPR_ARG (exp, 2);
3930
3931 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3932 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3933 enum machine_mode insn_mode
3934 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
3935
3936 len1 = c_strlen (arg1, 1);
3937 len2 = c_strlen (arg2, 1);
3938
3939 if (len1)
3940 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
3941 if (len2)
3942 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
3943
3944 /* If we don't have a constant length for the first, use the length
3945 of the second, if we know it. We don't require a constant for
3946 this case; some cost analysis could be done if both are available
3947 but neither is constant. For now, assume they're equally cheap,
3948 unless one has side effects. If both strings have constant lengths,
3949 use the smaller. */
3950
3951 if (!len1)
3952 len = len2;
3953 else if (!len2)
3954 len = len1;
3955 else if (TREE_SIDE_EFFECTS (len1))
3956 len = len2;
3957 else if (TREE_SIDE_EFFECTS (len2))
3958 len = len1;
3959 else if (TREE_CODE (len1) != INTEGER_CST)
3960 len = len2;
3961 else if (TREE_CODE (len2) != INTEGER_CST)
3962 len = len1;
3963 else if (tree_int_cst_lt (len1, len2))
3964 len = len1;
3965 else
3966 len = len2;
3967
3968 /* If both arguments have side effects, we cannot optimize. */
3969 if (!len || TREE_SIDE_EFFECTS (len))
3970 return NULL_RTX;
3971
3972 /* The actual new length parameter is MIN(len,arg3). */
3973 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
3974 fold_convert_loc (loc, TREE_TYPE (len), arg3));
3975
3976 /* If the alignment of either argument is unknown, call the function. */
3977 if (arg1_align == 0 || arg2_align == 0)
3978 return NULL_RTX;
3979
3980 /* Make a place to write the result of the instruction. */
3981 result = target;
3982 if (! (result != 0
3983 && REG_P (result) && GET_MODE (result) == insn_mode
3984 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3985 result = gen_reg_rtx (insn_mode);
3986
3987 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
3988 arg1 = builtin_save_expr (arg1);
3989 arg2 = builtin_save_expr (arg2);
3990 len = builtin_save_expr (len);
3991
3992 arg1_rtx = get_memory_rtx (arg1, len);
3993 arg2_rtx = get_memory_rtx (arg2, len);
3994 arg3_rtx = expand_normal (len);
3995 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3996 GEN_INT (MIN (arg1_align, arg2_align)));
3997 if (insn)
3998 {
3999 emit_insn (insn);
4000
4001 /* Return the value in the proper mode for this function. */
4002 mode = TYPE_MODE (TREE_TYPE (exp));
4003 if (GET_MODE (result) == mode)
4004 return result;
4005 if (target == 0)
4006 return convert_to_mode (mode, result, 0);
4007 convert_move (target, result, 0);
4008 return target;
4009 }
4010
4011 /* Expand the library call ourselves using a stabilized argument
4012 list to avoid re-evaluating the function's arguments twice. */
4013 fndecl = get_callee_fndecl (exp);
4014 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4015 arg1, arg2, len);
4016 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4017 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4018 return expand_call (fn, target, target == const0_rtx);
4019 }
4020 #endif
4021 return NULL_RTX;
4022 }
4023
4024 /* Expand a call to __builtin_saveregs. The result is computed once
4025 per function, cached in saveregs_value, and reused on later calls. */
4026
4027 rtx
4028 expand_builtin_saveregs (void)
4029 {
4030 rtx val, seq;
4031
4032 /* Don't do __builtin_saveregs more than once in a function.
4033 Save the result of the first call and reuse it. */
4034 if (saveregs_value != 0)
4035 return saveregs_value;
4036
4037 /* When this function is called, it means that registers must be
4038 saved on entry to this function. So we migrate the call to the
4039 first insn of this function. */
4040
4041 start_sequence ();
4042
4043 /* Do whatever the machine needs done in this case. */
4044 val = targetm.calls.expand_builtin_saveregs ();
4045
4046 seq = get_insns ();
4047 end_sequence ();
4048
4049 saveregs_value = val;
4050
4051 /* Put the insns after the NOTE that starts the function. If this
4052 is inside a start_sequence, make the outer-level insn chain current, so
4053 the code is placed at the start of the function. */
4054 push_topmost_sequence ();
4055 emit_insn_after (seq, entry_of_function ());
4056 pop_topmost_sequence ();
4057
4058 return val;
4059 }
4060
4061 /* Expand a call to __builtin_next_arg. */
4062
4063 static rtx
4064 expand_builtin_next_arg (void)
4065 {
4066 /* Argument checking is already done in fold_builtin_next_arg,
4067 which must be called before this function. */
4068 return expand_binop (ptr_mode, add_optab,
4069 crtl->args.internal_arg_pointer,
4070 crtl->args.arg_offset_rtx,
4071 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4072 }
4073
4074 /* Make it easier for the backends by protecting the valist argument
4075 from multiple evaluations. */
4076
4077 static tree
4078 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4079 {
4080 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4081
4082 /* The current way of determining the type of valist is completely
4083 bogus. We should have the information on the va builtin instead. */
4084 if (!vatype)
4085 vatype = targetm.fn_abi_va_list (cfun->decl);
4086
4087 if (TREE_CODE (vatype) == ARRAY_TYPE)
4088 {
4089 if (TREE_SIDE_EFFECTS (valist))
4090 valist = save_expr (valist);
4091
4092 /* For this case, the backends will be expecting a pointer to
4093 vatype, but it's possible we've actually been given an array
4094 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4095 So fix it. */
4096 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4097 {
4098 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4099 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4100 }
4101 }
4102 else
4103 {
4104 tree pt = build_pointer_type (vatype);
4105
4106 if (! needs_lvalue)
4107 {
4108 if (! TREE_SIDE_EFFECTS (valist))
4109 return valist;
4110
4111 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4112 TREE_SIDE_EFFECTS (valist) = 1;
4113 }
4114
4115 if (TREE_SIDE_EFFECTS (valist))
4116 valist = save_expr (valist);
4117 valist = fold_build2_loc (loc, MEM_REF,
4118 vatype, valist, build_int_cst (pt, 0));
4119 }
4120
4121 return valist;
4122 }
4123
4124 /* The "standard" definition of va_list is void*. */
4125
4126 tree
4127 std_build_builtin_va_list (void)
4128 {
4129 return ptr_type_node;
4130 }
4131
4132 /* The "standard" abi va_list is va_list_type_node. */
4133
4134 tree
4135 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4136 {
4137 return va_list_type_node;
4138 }
4139
4140 /* The "standard" type of va_list is va_list_type_node. */
4141
4142 tree
4143 std_canonical_va_list_type (tree type)
4144 {
4145 tree wtype, htype;
4146
4147 if (INDIRECT_REF_P (type))
4148 type = TREE_TYPE (type);
4149 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
4150 type = TREE_TYPE (type);
4151 wtype = va_list_type_node;
4152 htype = type;
4153 /* Treat structure va_list types. */
4154 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4155 htype = TREE_TYPE (htype);
4156 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4157 {
4158 /* If va_list is an array type, the argument may have decayed
4159 to a pointer type, e.g. by being passed to another function.
4160 In that case, unwrap both types so that we can compare the
4161 underlying records. */
4162 if (TREE_CODE (htype) == ARRAY_TYPE
4163 || POINTER_TYPE_P (htype))
4164 {
4165 wtype = TREE_TYPE (wtype);
4166 htype = TREE_TYPE (htype);
4167 }
4168 }
4169 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4170 return va_list_type_node;
4171
4172 return NULL_TREE;
4173 }
4174
4175 /* The "standard" implementation of va_start: just assign `nextarg' to
4176 the variable. */
4177
4178 void
4179 std_expand_builtin_va_start (tree valist, rtx nextarg)
4180 {
4181 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4182 convert_move (va_r, nextarg, 0);
4183 }
4184
4185 /* Expand EXP, a call to __builtin_va_start. */
4186
4187 static rtx
4188 expand_builtin_va_start (tree exp)
4189 {
4190 rtx nextarg;
4191 tree valist;
4192 location_t loc = EXPR_LOCATION (exp);
4193
4194 if (call_expr_nargs (exp) < 2)
4195 {
4196 error_at (loc, "too few arguments to function %<va_start%>");
4197 return const0_rtx;
4198 }
4199
4200 if (fold_builtin_next_arg (exp, true))
4201 return const0_rtx;
4202
4203 nextarg = expand_builtin_next_arg ();
4204 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4205
4206 if (targetm.expand_builtin_va_start)
4207 targetm.expand_builtin_va_start (valist, nextarg);
4208 else
4209 std_expand_builtin_va_start (valist, nextarg);
4210
4211 return const0_rtx;
4212 }
4213
4214 /* The "standard" implementation of va_arg: read the value from the
4215 current (padded) address and increment by the (padded) size. */
4216
4217 tree
4218 std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
4219 gimple_seq *post_p)
4220 {
4221 tree addr, t, type_size, rounded_size, valist_tmp;
4222 unsigned HOST_WIDE_INT align, boundary;
4223 bool indirect;
4224
4225 #ifdef ARGS_GROW_DOWNWARD
4226 /* All of the alignment and movement below is for args-grow-up machines.
4227 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4228 implement their own specialized gimplify_va_arg_expr routines. */
4229 gcc_unreachable ();
4230 #endif
4231
4232 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4233 if (indirect)
4234 type = build_pointer_type (type);
4235
4236 align = PARM_BOUNDARY / BITS_PER_UNIT;
4237 boundary = targetm.calls.function_arg_boundary (TYPE_MODE (type), type);
4238
4239 /* When we align parameter on stack for caller, if the parameter
4240 alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
4241 aligned at MAX_SUPPORTED_STACK_ALIGNMENT. We will match callee
4242 here with caller. */
4243 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
4244 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
4245
4246 boundary /= BITS_PER_UNIT;
4247
4248 /* Hoist the valist value into a temporary for the moment. */
4249 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4250
4251 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4252 requires greater alignment, we must perform dynamic alignment. */
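/* The two statements below compute, in effect,
     ap = (ap + boundary - 1) & -boundary;
   e.g. ap == 12 with boundary == 8 gives (12 + 7) & -8 == 16.  */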
4253 if (boundary > align
4254 && !integer_zerop (TYPE_SIZE (type)))
4255 {
4256 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4257 fold_build_pointer_plus_hwi (valist_tmp, boundary - 1));
4258 gimplify_and_add (t, pre_p);
4259
4260 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4261 fold_build2 (BIT_AND_EXPR, TREE_TYPE (valist),
4262 valist_tmp,
4263 build_int_cst (TREE_TYPE (valist), -boundary)));
4264 gimplify_and_add (t, pre_p);
4265 }
4266 else
4267 boundary = align;
4268
4269 /* If the actual alignment is less than the alignment of the type,
4270 adjust the type accordingly so that we don't assume strict alignment
4271 when dereferencing the pointer. */
4272 boundary *= BITS_PER_UNIT;
4273 if (boundary < TYPE_ALIGN (type))
4274 {
4275 type = build_variant_type_copy (type);
4276 TYPE_ALIGN (type) = boundary;
4277 }
4278
4279 /* Compute the rounded size of the type. */
4280 type_size = size_in_bytes (type);
4281 rounded_size = round_up (type_size, align);
4282
4283 /* Reduce rounded_size so it's sharable with the postqueue. */
4284 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4285
4286 /* Get AP. */
4287 addr = valist_tmp;
4288 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4289 {
4290 /* Small args are padded downward. */
4291 t = fold_build2_loc (input_location, GT_EXPR, sizetype,
4292 rounded_size, size_int (align));
4293 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4294 size_binop (MINUS_EXPR, rounded_size, type_size));
4295 addr = fold_build_pointer_plus (addr, t);
4296 }
4297
4298 /* Compute new value for AP. */
4299 t = fold_build_pointer_plus (valist_tmp, rounded_size);
4300 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4301 gimplify_and_add (t, pre_p);
4302
4303 addr = fold_convert (build_pointer_type (type), addr);
4304
4305 if (indirect)
4306 addr = build_va_arg_indirect_ref (addr);
4307
4308 return build_va_arg_indirect_ref (addr);
4309 }
4310
4311 /* Build an indirect-ref expression over the given TREE, which represents a
4312 piece of a va_arg() expansion. */
4313 tree
4314 build_va_arg_indirect_ref (tree addr)
4315 {
4316 addr = build_simple_mem_ref_loc (EXPR_LOCATION (addr), addr);
4317
4318 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4319 mf_mark (addr);
4320
4321 return addr;
4322 }
4323
4324 /* Return a dummy expression of type TYPE in order to keep going after an
4325 error. */
4326
4327 static tree
4328 dummy_object (tree type)
4329 {
4330 tree t = build_int_cst (build_pointer_type (type), 0);
4331 return build2 (MEM_REF, type, t, t);
4332 }
4333
4334 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4335 builtin function, but a very special sort of operator. */
4336
4337 enum gimplify_status
4338 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4339 {
4340 tree promoted_type, have_va_type;
4341 tree valist = TREE_OPERAND (*expr_p, 0);
4342 tree type = TREE_TYPE (*expr_p);
4343 tree t;
4344 location_t loc = EXPR_LOCATION (*expr_p);
4345
4346 /* Verify that valist is of the proper type. */
4347 have_va_type = TREE_TYPE (valist);
4348 if (have_va_type == error_mark_node)
4349 return GS_ERROR;
4350 have_va_type = targetm.canonical_va_list_type (have_va_type);
4351
4352 if (have_va_type == NULL_TREE)
4353 {
4354 error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
4355 return GS_ERROR;
4356 }
4357
4358 /* Generate a diagnostic for requesting data of a type that cannot
4359 be passed through `...' due to type promotion at the call site. */
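/* E.g. va_arg (ap, char) is affected: a char argument is promoted to int
   when passed through `...', so it must be read with va_arg (ap, int).  */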
4360 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
4361 != type)
4362 {
4363 static bool gave_help;
4364 bool warned;
4365
4366 /* Unfortunately, this is merely undefined, rather than a constraint
4367 violation, so we cannot make this an error. If this call is never
4368 executed, the program is still strictly conforming. */
4369 warned = warning_at (loc, 0,
4370 "%qT is promoted to %qT when passed through %<...%>",
4371 type, promoted_type);
4372 if (!gave_help && warned)
4373 {
4374 gave_help = true;
4375 inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
4376 promoted_type, type);
4377 }
4378
4379 /* We can, however, treat "undefined" any way we please.
4380 Call abort to encourage the user to fix the program. */
4381 if (warned)
4382 inform (loc, "if this code is reached, the program will abort");
4383 /* Before the abort, allow the evaluation of the va_list
4384 expression to exit or longjmp. */
4385 gimplify_and_add (valist, pre_p);
4386 t = build_call_expr_loc (loc,
4387 builtin_decl_implicit (BUILT_IN_TRAP), 0);
4388 gimplify_and_add (t, pre_p);
4389
4390 /* This is dead code, but go ahead and finish so that the
4391 mode of the result comes out right. */
4392 *expr_p = dummy_object (type);
4393 return GS_ALL_DONE;
4394 }
4395 else
4396 {
4397 /* Make it easier for the backends by protecting the valist argument
4398 from multiple evaluations. */
4399 if (TREE_CODE (have_va_type) == ARRAY_TYPE)
4400 {
4401 /* For this case, the backends will be expecting a pointer to
4402 TREE_TYPE (abi), but it's possible we've
4403 actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
4404 So fix it. */
4405 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4406 {
4407 tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
4408 valist = fold_convert_loc (loc, p1,
4409 build_fold_addr_expr_loc (loc, valist));
4410 }
4411
4412 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
4413 }
4414 else
4415 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
4416
4417 if (!targetm.gimplify_va_arg_expr)
4418 /* FIXME: Once most targets are converted we should merely
4419 assert this is non-null. */
4420 return GS_ALL_DONE;
4421
4422 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
4423 return GS_OK;
4424 }
4425 }
4426
4427 /* Expand EXP, a call to __builtin_va_end. */
4428
4429 static rtx
4430 expand_builtin_va_end (tree exp)
4431 {
4432 tree valist = CALL_EXPR_ARG (exp, 0);
4433
4434 /* Evaluate for side effects, if needed. I hate macros that don't
4435 do that. */
4436 if (TREE_SIDE_EFFECTS (valist))
4437 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4438
4439 return const0_rtx;
4440 }
4441
4442 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4443 builtin rather than just as an assignment in stdarg.h because of the
4444 nastiness of array-type va_list types. */
4445
4446 static rtx
4447 expand_builtin_va_copy (tree exp)
4448 {
4449 tree dst, src, t;
4450 location_t loc = EXPR_LOCATION (exp);
4451
4452 dst = CALL_EXPR_ARG (exp, 0);
4453 src = CALL_EXPR_ARG (exp, 1);
4454
4455 dst = stabilize_va_list_loc (loc, dst, 1);
4456 src = stabilize_va_list_loc (loc, src, 0);
4457
4458 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4459
4460 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4461 {
4462 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4463 TREE_SIDE_EFFECTS (t) = 1;
4464 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4465 }
4466 else
4467 {
4468 rtx dstb, srcb, size;
4469
4470 /* Evaluate to pointers. */
4471 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4472 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4473 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4474 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4475
4476 dstb = convert_memory_address (Pmode, dstb);
4477 srcb = convert_memory_address (Pmode, srcb);
4478
4479 /* "Dereference" to BLKmode memories. */
4480 dstb = gen_rtx_MEM (BLKmode, dstb);
4481 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4482 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4483 srcb = gen_rtx_MEM (BLKmode, srcb);
4484 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4485 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4486
4487 /* Copy. */
4488 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4489 }
4490
4491 return const0_rtx;
4492 }
4493
4494 /* Expand a call to one of the builtin functions __builtin_frame_address or
4495 __builtin_return_address. */
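/* For example, __builtin_return_address (0) yields the return address of
   the current function, and __builtin_frame_address (1) the frame address
   of its caller, on targets that can walk the stack that far.  */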
4496
4497 static rtx
4498 expand_builtin_frame_address (tree fndecl, tree exp)
4499 {
4500 /* The argument must be a nonnegative integer constant.
4501 It counts the number of frames to scan up the stack.
4502 The value is the return address saved in that frame. */
4503 if (call_expr_nargs (exp) == 0)
4504 /* Warning about missing arg was already issued. */
4505 return const0_rtx;
4506 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
4507 {
4508 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4509 error ("invalid argument to %<__builtin_frame_address%>");
4510 else
4511 error ("invalid argument to %<__builtin_return_address%>");
4512 return const0_rtx;
4513 }
4514 else
4515 {
4516 rtx tem
4517 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4518 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
4519
4520 /* Some ports cannot access arbitrary stack frames. */
4521 if (tem == NULL)
4522 {
4523 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4524 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4525 else
4526 warning (0, "unsupported argument to %<__builtin_return_address%>");
4527 return const0_rtx;
4528 }
4529
4530 /* For __builtin_frame_address, return what we've got. */
4531 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4532 return tem;
4533
4534 if (!REG_P (tem)
4535 && ! CONSTANT_P (tem))
4536 tem = copy_addr_to_reg (tem);
4537 return tem;
4538 }
4539 }
4540
4541 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4542 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4543 is the same as for allocate_dynamic_stack_space. */
4544
4545 static rtx
4546 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4547 {
4548 rtx op0;
4549 rtx result;
4550 bool valid_arglist;
4551 unsigned int align;
4552 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4553 == BUILT_IN_ALLOCA_WITH_ALIGN);
4554
4555 /* Emit a normal call if we use mudflap. */
4556 if (flag_mudflap)
4557 return NULL_RTX;
4558
4559 valid_arglist
4560 = (alloca_with_align
4561 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4562 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4563
4564 if (!valid_arglist)
4565 return NULL_RTX;
4566
4567 /* Compute the argument. */
4568 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4569
4570 /* Compute the alignment. */
4571 align = (alloca_with_align
4572 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4573 : BIGGEST_ALIGNMENT);
4574
4575 /* Allocate the desired space. */
4576 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4577 result = convert_memory_address (ptr_mode, result);
4578
4579 return result;
4580 }
4581
4582 /* Expand a call to bswap builtin in EXP.
4583 Return NULL_RTX if a normal call should be emitted rather than expanding the
4584 function in-line. If convenient, the result should be placed in TARGET.
4585 SUBTARGET may be used as the target for computing one of EXP's operands. */
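/* E.g. __builtin_bswap32 (0x12345678) evaluates to 0x78563412.  */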
4586
4587 static rtx
4588 expand_builtin_bswap (enum machine_mode target_mode, tree exp, rtx target,
4589 rtx subtarget)
4590 {
4591 tree arg;
4592 rtx op0;
4593
4594 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4595 return NULL_RTX;
4596
4597 arg = CALL_EXPR_ARG (exp, 0);
4598 op0 = expand_expr (arg,
4599 subtarget && GET_MODE (subtarget) == target_mode
4600 ? subtarget : NULL_RTX,
4601 target_mode, EXPAND_NORMAL);
4602 if (GET_MODE (op0) != target_mode)
4603 op0 = convert_to_mode (target_mode, op0, 1);
4604
4605 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4606
4607 gcc_assert (target);
4608
4609 return convert_to_mode (target_mode, target, 1);
4610 }
4611
4612 /* Expand a call to a unary builtin in EXP.
4613 Return NULL_RTX if a normal call should be emitted rather than expanding the
4614 function in-line. If convenient, the result should be placed in TARGET.
4615 SUBTARGET may be used as the target for computing one of EXP's operands. */
4616
4617 static rtx
4618 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
4619 rtx subtarget, optab op_optab)
4620 {
4621 rtx op0;
4622
4623 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4624 return NULL_RTX;
4625
4626 /* Compute the argument. */
4627 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4628 (subtarget
4629 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4630 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4631 VOIDmode, EXPAND_NORMAL);
4632 /* Compute op, into TARGET if possible.
4633 Set TARGET to wherever the result comes back. */
4634 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4635 op_optab, op0, target, op_optab != clrsb_optab);
4636 gcc_assert (target);
4637
4638 return convert_to_mode (target_mode, target, 0);
4639 }
4640
4641 /* Expand a call to __builtin_expect. We just return our argument,
4642 as the builtin_expect semantics should already have been executed by
4643 the tree branch prediction pass. */
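/* Typical source-level use is something like
     if (__builtin_expect (ptr == NULL, 0)) ...
   By this point the hint has been consumed, so only the value remains.  */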
4644
4645 static rtx
4646 expand_builtin_expect (tree exp, rtx target)
4647 {
4648 tree arg;
4649
4650 if (call_expr_nargs (exp) < 2)
4651 return const0_rtx;
4652 arg = CALL_EXPR_ARG (exp, 0);
4653
4654 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4655 /* When guessing was done, the hints should be already stripped away. */
4656 gcc_assert (!flag_guess_branch_prob
4657 || optimize == 0 || seen_error ());
4658 return target;
4659 }
4660
4661 /* Expand a call to __builtin_assume_aligned. We just return our first
4662 argument, as the builtin_assume_aligned semantics should already have
4663 been executed by CCP. */
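/* Typical source-level use is p = __builtin_assume_aligned (p, 16);
   the alignment fact itself was already consumed by earlier passes.  */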
4664
4665 static rtx
4666 expand_builtin_assume_aligned (tree exp, rtx target)
4667 {
4668 if (call_expr_nargs (exp) < 2)
4669 return const0_rtx;
4670 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4671 EXPAND_NORMAL);
4672 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4673 && (call_expr_nargs (exp) < 3
4674 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4675 return target;
4676 }
4677
4678 void
4679 expand_builtin_trap (void)
4680 {
4681 #ifdef HAVE_trap
4682 if (HAVE_trap)
4683 {
4684 rtx insn = emit_insn (gen_trap ());
4685 /* For trap insns when not accumulating outgoing args force
4686 REG_ARGS_SIZE note to prevent crossjumping of calls with
4687 different args sizes. */
4688 if (!ACCUMULATE_OUTGOING_ARGS)
4689 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4690 }
4691 else
4692 #endif
4693 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4694 emit_barrier ();
4695 }
4696
4697 /* Expand a call to __builtin_unreachable. We do nothing except emit
4698 a barrier saying that control flow will not pass here.
4699
4700 It is the responsibility of the program being compiled to ensure
4701 that control flow never reaches __builtin_unreachable. */
4702 static void
4703 expand_builtin_unreachable (void)
4704 {
4705 emit_barrier ();
4706 }
4707
4708 /* Expand EXP, a call to fabs, fabsf or fabsl.
4709 Return NULL_RTX if a normal call should be emitted rather than expanding
4710 the function inline. If convenient, the result should be placed
4711 in TARGET. SUBTARGET may be used as the target for computing
4712 the operand. */
4713
4714 static rtx
4715 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4716 {
4717 enum machine_mode mode;
4718 tree arg;
4719 rtx op0;
4720
4721 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4722 return NULL_RTX;
4723
4724 arg = CALL_EXPR_ARG (exp, 0);
4725 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4726 mode = TYPE_MODE (TREE_TYPE (arg));
4727 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4728 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4729 }
4730
4731 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4732 Return NULL_RTX if a normal call should be emitted rather than expanding
4733 the function inline. If convenient, the result should be placed in TARGET.
4734 SUBTARGET may be used as the target for computing the operand. */
4735
4736 static rtx
4737 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4738 {
4739 rtx op0, op1;
4740 tree arg;
4741
4742 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4743 return NULL_RTX;
4744
4745 arg = CALL_EXPR_ARG (exp, 0);
4746 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4747
4748 arg = CALL_EXPR_ARG (exp, 1);
4749 op1 = expand_normal (arg);
4750
4751 return expand_copysign (op0, op1, target);
4752 }
4753
4754 /* Create a new constant string literal and return a char* pointer to it.
4755 The STRING_CST value is the LEN characters at STR. */
4756 tree
4757 build_string_literal (int len, const char *str)
4758 {
4759 tree t, elem, index, type;
4760
4761 t = build_string (len, str);
4762 elem = build_type_variant (char_type_node, 1, 0);
4763 index = build_index_type (size_int (len - 1));
4764 type = build_array_type (elem, index);
4765 TREE_TYPE (t) = type;
4766 TREE_CONSTANT (t) = 1;
4767 TREE_READONLY (t) = 1;
4768 TREE_STATIC (t) = 1;
4769
4770 type = build_pointer_type (elem);
4771 t = build1 (ADDR_EXPR, type,
4772 build4 (ARRAY_REF, elem,
4773 t, integer_zero_node, NULL_TREE, NULL_TREE));
4774 return t;
4775 }
4776
4777 /* Expand a call to __builtin___clear_cache. */
4778
4779 static rtx
4780 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
4781 {
4782 #ifndef HAVE_clear_cache
4783 #ifdef CLEAR_INSN_CACHE
4784 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4785 does something. Just do the default expansion to a call to
4786 __clear_cache(). */
4787 return NULL_RTX;
4788 #else
4789 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4790 does nothing. There is no need to call it. Do nothing. */
4791 return const0_rtx;
4792 #endif /* CLEAR_INSN_CACHE */
4793 #else
4794 /* We have a "clear_cache" insn, and it will handle everything. */
4795 tree begin, end;
4796 rtx begin_rtx, end_rtx;
4797
4798 /* We must not expand to a library call. If we did, any
4799 fallback library function in libgcc that might contain a call to
4800 __builtin___clear_cache() would recurse infinitely. */
4801 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4802 {
4803 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4804 return const0_rtx;
4805 }
4806
4807 if (HAVE_clear_cache)
4808 {
4809 struct expand_operand ops[2];
4810
4811 begin = CALL_EXPR_ARG (exp, 0);
4812 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4813
4814 end = CALL_EXPR_ARG (exp, 1);
4815 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4816
4817 create_address_operand (&ops[0], begin_rtx);
4818 create_address_operand (&ops[1], end_rtx);
4819 if (maybe_expand_insn (CODE_FOR_clear_cache, 2, ops))
4820 return const0_rtx;
4821 }
4822 return const0_rtx;
4823 #endif /* HAVE_clear_cache */
4824 }
4825
4826 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4827
4828 static rtx
4829 round_trampoline_addr (rtx tramp)
4830 {
4831 rtx temp, addend, mask;
4832
4833 /* If we don't need too much alignment, we'll have been guaranteed
4834 proper alignment by get_trampoline_type. */
4835 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4836 return tramp;
4837
4838 /* Round address up to desired boundary. */
4839 temp = gen_reg_rtx (Pmode);
4840 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
4841 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
4842
4843 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4844 temp, 0, OPTAB_LIB_WIDEN);
4845 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4846 temp, 0, OPTAB_LIB_WIDEN);
4847
4848 return tramp;
4849 }
4850
4851 static rtx
4852 expand_builtin_init_trampoline (tree exp, bool onstack)
4853 {
4854 tree t_tramp, t_func, t_chain;
4855 rtx m_tramp, r_tramp, r_chain, tmp;
4856
4857 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4858 POINTER_TYPE, VOID_TYPE))
4859 return NULL_RTX;
4860
4861 t_tramp = CALL_EXPR_ARG (exp, 0);
4862 t_func = CALL_EXPR_ARG (exp, 1);
4863 t_chain = CALL_EXPR_ARG (exp, 2);
4864
4865 r_tramp = expand_normal (t_tramp);
4866 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4867 MEM_NOTRAP_P (m_tramp) = 1;
4868
4869 /* If ONSTACK, the TRAMP argument should be the address of a field
4870 within the local function's FRAME decl. Either way, let's see if
4871 we can fill in the MEM_ATTRs for this memory. */
4872 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4873 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
4874
4875 /* Creator of a heap trampoline is responsible for making sure the
4876 address is aligned to at least STACK_BOUNDARY. Normally malloc
4877 will ensure this anyhow. */
4878 tmp = round_trampoline_addr (r_tramp);
4879 if (tmp != r_tramp)
4880 {
4881 m_tramp = change_address (m_tramp, BLKmode, tmp);
4882 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4883 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4884 }
4885
4886 /* The FUNC argument should be the address of the nested function.
4887 Extract the actual function decl to pass to the hook. */
4888 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4889 t_func = TREE_OPERAND (t_func, 0);
4890 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4891
4892 r_chain = expand_normal (t_chain);
4893
4894 /* Generate insns to initialize the trampoline. */
4895 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4896
4897 if (onstack)
4898 {
4899 trampolines_created = 1;
4900
4901 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4902 "trampoline generated for nested function %qD", t_func);
4903 }
4904
4905 return const0_rtx;
4906 }
4907
4908 static rtx
4909 expand_builtin_adjust_trampoline (tree exp)
4910 {
4911 rtx tramp;
4912
4913 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4914 return NULL_RTX;
4915
4916 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4917 tramp = round_trampoline_addr (tramp);
4918 if (targetm.calls.trampoline_adjust_address)
4919 tramp = targetm.calls.trampoline_adjust_address (tramp);
4920
4921 return tramp;
4922 }
4923
4924 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4925 function. The function first checks whether the back end provides
4926 an insn to implement signbit for the respective mode. If not, it
4927 checks whether the floating point format of the value is such that
4928 the sign bit can be extracted. If that is not the case, the
4929 function returns NULL_RTX to indicate that a normal call should be
4930 emitted rather than expanding the function in-line. EXP is the
4931 expression that is a call to the builtin function; if convenient,
4932 the result should be placed in TARGET. */
4933 static rtx
4934 expand_builtin_signbit (tree exp, rtx target)
4935 {
4936 const struct real_format *fmt;
4937 enum machine_mode fmode, imode, rmode;
4938 tree arg;
4939 int word, bitpos;
4940 enum insn_code icode;
4941 rtx temp;
4942 location_t loc = EXPR_LOCATION (exp);
4943
4944 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4945 return NULL_RTX;
4946
4947 arg = CALL_EXPR_ARG (exp, 0);
4948 fmode = TYPE_MODE (TREE_TYPE (arg));
4949 rmode = TYPE_MODE (TREE_TYPE (exp));
4950 fmt = REAL_MODE_FORMAT (fmode);
4951
4952 arg = builtin_save_expr (arg);
4953
4954 /* Expand the argument yielding a RTX expression. */
4955 temp = expand_normal (arg);
4956
4957 /* Check if the back end provides an insn that handles signbit for the
4958 argument's mode. */
4959 icode = optab_handler (signbit_optab, fmode);
4960 if (icode != CODE_FOR_nothing)
4961 {
4962 rtx last = get_last_insn ();
4963 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4964 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4965 return target;
4966 delete_insns_since (last);
4967 }
4968
4969 /* For floating point formats without a sign bit, implement signbit
4970 as "ARG < 0.0". */
4971 bitpos = fmt->signbit_ro;
4972 if (bitpos < 0)
4973 {
4974 /* But we can't do this if the format supports signed zero. */
4975 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
4976 return NULL_RTX;
4977
4978 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
4979 build_real (TREE_TYPE (arg), dconst0));
4980 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4981 }
4982
4983 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
4984 {
4985 imode = int_mode_for_mode (fmode);
4986 if (imode == BLKmode)
4987 return NULL_RTX;
4988 temp = gen_lowpart (imode, temp);
4989 }
4990 else
4991 {
4992 imode = word_mode;
4993 /* Handle targets with different FP word orders. */
4994 if (FLOAT_WORDS_BIG_ENDIAN)
4995 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
4996 else
4997 word = bitpos / BITS_PER_WORD;
4998 temp = operand_subword_force (temp, word, fmode);
4999 bitpos = bitpos % BITS_PER_WORD;
5000 }
5001
5002 /* Force the intermediate word_mode (or narrower) result into a
5003 register. This avoids attempting to create paradoxical SUBREGs
5004 of floating point modes below. */
5005 temp = force_reg (imode, temp);
5006
5007 /* If the bitpos is within the "result mode" lowpart, the operation
5008 can be implemented with a single bitwise AND. Otherwise, we need
5009 a right shift and an AND. */
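/* E.g. for IEEE single precision, fmt->signbit_ro is 31; with a 32-bit
   result mode the mask below is 0x80000000 and a single AND suffices.  */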
5010
5011 if (bitpos < GET_MODE_BITSIZE (rmode))
5012 {
5013 double_int mask = double_int_zero.set_bit (bitpos);
5014
5015 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5016 temp = gen_lowpart (rmode, temp);
5017 temp = expand_binop (rmode, and_optab, temp,
5018 immed_double_int_const (mask, rmode),
5019 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5020 }
5021 else
5022 {
5023 /* Perform a logical right shift to place the signbit in the least
5024 significant bit, then truncate the result to the desired mode
5025 and mask just this bit. */
5026 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5027 temp = gen_lowpart (rmode, temp);
5028 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5029 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5030 }
5031
5032 return temp;
5033 }
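/* Editor's note: a minimal user-level sketch of the shift-and-mask path
   above, assuming IEEE binary64 where the sign occupies bit 63.  The
   helper name "signbit_via_bits" is hypothetical and not part of GCC;
   the block is guarded out so the file still compiles.  */
#if 0
#include <stdint.h>
#include <string.h>

static int
signbit_via_bits (double x)
{
  uint64_t bits;
  memcpy (&bits, &x, sizeof bits);  /* Reinterpret the representation.  */
  return (int) (bits >> 63);        /* Logical shift: sign bit to LSB.  */
}
#endif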
5034
5035 /* Expand fork or exec calls. TARGET is the desired target of the
5036 call. EXP is the call. FN is the
5037 identifier of the actual function. IGNORE is nonzero if the
5038 value is to be ignored. */
5039
5040 static rtx
5041 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5042 {
5043 tree id, decl;
5044 tree call;
5045
5046 /* If we are not profiling, just call the function. */
5047 if (!profile_arc_flag)
5048 return NULL_RTX;
5049
5050 /* Otherwise call the wrapper. This should be equivalent for the rest of
5051 the compiler, so the code does not diverge, and the wrapper may run the
5052 code necessary to keep the profiling data sane. */
5053
5054 switch (DECL_FUNCTION_CODE (fn))
5055 {
5056 case BUILT_IN_FORK:
5057 id = get_identifier ("__gcov_fork");
5058 break;
5059
5060 case BUILT_IN_EXECL:
5061 id = get_identifier ("__gcov_execl");
5062 break;
5063
5064 case BUILT_IN_EXECV:
5065 id = get_identifier ("__gcov_execv");
5066 break;
5067
5068 case BUILT_IN_EXECLP:
5069 id = get_identifier ("__gcov_execlp");
5070 break;
5071
5072 case BUILT_IN_EXECLE:
5073 id = get_identifier ("__gcov_execle");
5074 break;
5075
5076 case BUILT_IN_EXECVP:
5077 id = get_identifier ("__gcov_execvp");
5078 break;
5079
5080 case BUILT_IN_EXECVE:
5081 id = get_identifier ("__gcov_execve");
5082 break;
5083
5084 default:
5085 gcc_unreachable ();
5086 }
5087
5088 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5089 FUNCTION_DECL, id, TREE_TYPE (fn));
5090 DECL_EXTERNAL (decl) = 1;
5091 TREE_PUBLIC (decl) = 1;
5092 DECL_ARTIFICIAL (decl) = 1;
5093 TREE_NOTHROW (decl) = 1;
5094 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5095 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5096 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5097 return expand_call (call, target, ignore);
5098 }
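/* Editor's note: the __gcov_* wrappers requested above live in libgcov;
   at the time of writing, __gcov_fork, for instance, flushes the profile
   counters before calling fork so parent and child do not double-count
   arcs.  */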
5099
5100
5101 \f
5102 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5103 the pointer in these functions is void*, the tree optimizers may remove
5104 casts. The mode computed in expand_builtin isn't reliable either, due
5105 to __sync_bool_compare_and_swap.
5106
5107 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5108 group of builtins. This gives us log2 of the mode size. */
5109
5110 static inline enum machine_mode
5111 get_builtin_sync_mode (int fcode_diff)
5112 {
5113 /* The size is not negotiable, so ask not to get BLKmode in return
5114 if the target indicates that a smaller size would be better. */
5115 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5116 }
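/* Editor's note: as a worked example, the _1, _2, _4, _8 and _16 builtin
   variants yield FCODE_DIFF values 0 through 4, so BITS_PER_UNIT <<
   FCODE_DIFF requests 8-, 16-, 32-, 64- and 128-bit integer modes
   (QImode through TImode on typical targets).  */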
5117
5118 /* Expand the memory expression LOC and return the appropriate memory operand
5119 for the builtin_sync operations. */
5120
5121 static rtx
5122 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5123 {
5124 rtx addr, mem;
5125
5126 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5127 addr = convert_memory_address (Pmode, addr);
5128
5129 /* Note that we explicitly do not want any alias information for this
5130 memory, so that we kill all other live memories. Otherwise we don't
5131 satisfy the full barrier semantics of the intrinsic. */
5132 mem = validize_mem (gen_rtx_MEM (mode, addr));
5133
5134 /* The alignment must be at least as strict as that of the mode. */
5135 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5136 get_pointer_alignment (loc)));
5137 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5138 MEM_VOLATILE_P (mem) = 1;
5139
5140 return mem;
5141 }
5142
5143 /* Make sure an argument is in the right mode.
5144 EXP is the tree argument.
5145 MODE is the mode it should be in. */
5146
5147 static rtx
5148 expand_expr_force_mode (tree exp, enum machine_mode mode)
5149 {
5150 rtx val;
5151 enum machine_mode old_mode;
5152
5153 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5154 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5155 of CONST_INTs, where we know the old_mode only from the call argument. */
5156
5157 old_mode = GET_MODE (val);
5158 if (old_mode == VOIDmode)
5159 old_mode = TYPE_MODE (TREE_TYPE (exp));
5160 val = convert_modes (mode, old_mode, val, 1);
5161 return val;
5162 }
5163
5164
5165 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5166 EXP is the CALL_EXPR. CODE is the rtx code
5167 that corresponds to the arithmetic or logical operation from the name;
5168 an exception here is that NOT actually means NAND. TARGET is an optional
5169 place for us to store the results; AFTER is true if this is the
5170 fetch_and_xxx form. */
5171
5172 static rtx
5173 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5174 enum rtx_code code, bool after,
5175 rtx target)
5176 {
5177 rtx val, mem;
5178 location_t loc = EXPR_LOCATION (exp);
5179
5180 if (code == NOT && warn_sync_nand)
5181 {
5182 tree fndecl = get_callee_fndecl (exp);
5183 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5184
5185 static bool warned_f_a_n, warned_n_a_f;
5186
5187 switch (fcode)
5188 {
5189 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5190 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5191 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5192 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5193 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5194 if (warned_f_a_n)
5195 break;
5196
5197 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5198 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5199 warned_f_a_n = true;
5200 break;
5201
5202 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5203 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5204 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5205 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5206 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5207 if (warned_n_a_f)
5208 break;
5209
5210 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5211 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5212 warned_n_a_f = true;
5213 break;
5214
5215 default:
5216 gcc_unreachable ();
5217 }
5218 }
5219
5220 /* Expand the operands. */
5221 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5222 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5223
5224 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SEQ_CST,
5225 after);
5226 }
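/* Editor's note on the NAND warning above: since GCC 4.4,
   __sync_fetch_and_nand (ptr, val) performs *ptr = ~(*ptr & val) and
   returns the old value; releases before 4.4 computed
   *ptr = ~*ptr & val instead.  */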
5227
5228 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5229 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5230 true if this is the boolean form. TARGET is a place for us to store the
5231 results; this is NOT optional if IS_BOOL is true. */
5232
5233 static rtx
5234 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5235 bool is_bool, rtx target)
5236 {
5237 rtx old_val, new_val, mem;
5238 rtx *pbool, *poval;
5239
5240 /* Expand the operands. */
5241 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5242 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5243 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5244
5245 pbool = poval = NULL;
5246 if (target != const0_rtx)
5247 {
5248 if (is_bool)
5249 pbool = &target;
5250 else
5251 poval = &target;
5252 }
5253 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5254 false, MEMMODEL_SEQ_CST,
5255 MEMMODEL_SEQ_CST))
5256 return NULL_RTX;
5257
5258 return target;
5259 }
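/* Editor's sketch of the two user-visible forms handled above
   (hypothetical example code, not part of GCC):

     int v = 0;
     _Bool ok = __sync_bool_compare_and_swap (&v, 0, 1);  // nonzero if swapped
     int old  = __sync_val_compare_and_swap (&v, 1, 2);   // returns prior value
*/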
5260
5261 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5262 general form is actually an atomic exchange, and some targets only
5263 support a reduced form with the second argument being a constant 1.
5264 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5265 the results. */
5266
5267 static rtx
5268 expand_builtin_sync_lock_test_and_set (enum machine_mode mode, tree exp,
5269 rtx target)
5270 {
5271 rtx val, mem;
5272
5273 /* Expand the operands. */
5274 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5275 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5276
5277 return expand_sync_lock_test_and_set (target, mem, val);
5278 }
5279
5280 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5281
5282 static void
5283 expand_builtin_sync_lock_release (enum machine_mode mode, tree exp)
5284 {
5285 rtx mem;
5286
5287 /* Expand the operands. */
5288 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5289
5290 expand_atomic_store (mem, const0_rtx, MEMMODEL_RELEASE, true);
5291 }
5292
5293 /* Given an integer representing an ``enum memmodel'', verify its
5294 correctness and return the memory model enum. */
5295
5296 static enum memmodel
5297 get_memmodel (tree exp)
5298 {
5299 rtx op;
5300 unsigned HOST_WIDE_INT val;
5301
5302 /* If the parameter is not a constant, it's a run time value so we'll just
5303 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5304 if (TREE_CODE (exp) != INTEGER_CST)
5305 return MEMMODEL_SEQ_CST;
5306
5307 op = expand_normal (exp);
5308
5309 val = INTVAL (op);
5310 if (targetm.memmodel_check)
5311 val = targetm.memmodel_check (val);
5312 else if (val & ~MEMMODEL_MASK)
5313 {
5314 warning (OPT_Winvalid_memory_model,
5315 "Unknown architecture specifier in memory model to builtin.");
5316 return MEMMODEL_SEQ_CST;
5317 }
5318
5319 if ((INTVAL (op) & MEMMODEL_MASK) >= MEMMODEL_LAST)
5320 {
5321 warning (OPT_Winvalid_memory_model,
5322 "invalid memory model argument to builtin");
5323 return MEMMODEL_SEQ_CST;
5324 }
5325
5326 return (enum memmodel) val;
5327 }
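/* Editor's note: the bits below MEMMODEL_MASK carry the standard model,
   matching the user-level __ATOMIC_* constants (MEMMODEL_RELAXED == 0
   through MEMMODEL_SEQ_CST == 5 at the time of writing); any bits above
   the mask are target-specific and are vetted by targetm.memmodel_check
   when that hook is defined.  */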
5328
5329 /* Expand the __atomic_exchange intrinsic:
5330 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5331 EXP is the CALL_EXPR.
5332 TARGET is an optional place for us to store the results. */
5333
5334 static rtx
5335 expand_builtin_atomic_exchange (enum machine_mode mode, tree exp, rtx target)
5336 {
5337 rtx val, mem;
5338 enum memmodel model;
5339
5340 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5341 if ((model & MEMMODEL_MASK) == MEMMODEL_CONSUME)
5342 {
5343 error ("invalid memory model for %<__atomic_exchange%>");
5344 return NULL_RTX;
5345 }
5346
5347 if (!flag_inline_atomics)
5348 return NULL_RTX;
5349
5350 /* Expand the operands. */
5351 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5352 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5353
5354 return expand_atomic_exchange (target, mem, val, model);
5355 }
5356
5357 /* Expand the __atomic_compare_exchange intrinsic:
5358 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5359 TYPE desired, BOOL weak,
5360 enum memmodel success,
5361 enum memmodel failure)
5362 EXP is the CALL_EXPR.
5363 TARGET is an optional place for us to store the results. */
5364
5365 static rtx
5366 expand_builtin_atomic_compare_exchange (enum machine_mode mode, tree exp,
5367 rtx target)
5368 {
5369 rtx expect, desired, mem, oldval;
5370 enum memmodel success, failure;
5371 tree weak;
5372 bool is_weak;
5373
5374 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5375 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5376
5377 if ((failure & MEMMODEL_MASK) == MEMMODEL_RELEASE
5378 || (failure & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5379 {
5380 error ("invalid failure memory model for %<__atomic_compare_exchange%>");
5381 return NULL_RTX;
5382 }
5383
5384 if (failure > success)
5385 {
5386 error ("failure memory model cannot be stronger than success "
5387 "memory model for %<__atomic_compare_exchange%>");
5388 return NULL_RTX;
5389 }
5390
5391 if (!flag_inline_atomics)
5392 return NULL_RTX;
5393
5394 /* Expand the operands. */
5395 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5396
5397 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5398 expect = convert_memory_address (Pmode, expect);
5399 expect = gen_rtx_MEM (mode, expect);
5400 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5401
5402 weak = CALL_EXPR_ARG (exp, 3);
5403 is_weak = false;
5404 if (host_integerp (weak, 0) && tree_low_cst (weak, 0) != 0)
5405 is_weak = true;
5406
5407 oldval = expect;
5408 if (!expand_atomic_compare_and_swap ((target == const0_rtx ? NULL : &target),
5409 &oldval, mem, oldval, desired,
5410 is_weak, success, failure))
5411 return NULL_RTX;
5412
5413 if (oldval != expect)
5414 emit_move_insn (expect, oldval);
5415
5416 return target;
5417 }
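/* Editor's sketch of the user-visible semantics expanded above
   (hypothetical C, not part of GCC).  On failure the value actually seen
   is written back through EXPECT, which is why the expansion stores
   OLDVAL into the EXPECT memory:

     _Bool
     cas_sketch (int *object, int *expect, int desired)
     {
       if (*object == *expect)   // done atomically by the real builtin
         {
           *object = desired;
           return 1;
         }
       *expect = *object;        // report the value actually observed
       return 0;
     }
*/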
5418
5419 /* Expand the __atomic_load intrinsic:
5420 TYPE __atomic_load (TYPE *object, enum memmodel)
5421 EXP is the CALL_EXPR.
5422 TARGET is an optional place for us to store the results. */
5423
5424 static rtx
5425 expand_builtin_atomic_load (enum machine_mode mode, tree exp, rtx target)
5426 {
5427 rtx mem;
5428 enum memmodel model;
5429
5430 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5431 if ((model & MEMMODEL_MASK) == MEMMODEL_RELEASE
5432 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5433 {
5434 error ("invalid memory model for %<__atomic_load%>");
5435 return NULL_RTX;
5436 }
5437
5438 if (!flag_inline_atomics)
5439 return NULL_RTX;
5440
5441 /* Expand the operand. */
5442 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5443
5444 return expand_atomic_load (target, mem, model);
5445 }
5446
5447
5448 /* Expand the __atomic_store intrinsic:
5449 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5450 EXP is the CALL_EXPR.
5451 TARGET is an optional place for us to store the results. */
5452
5453 static rtx
5454 expand_builtin_atomic_store (enum machine_mode mode, tree exp)
5455 {
5456 rtx mem, val;
5457 enum memmodel model;
5458
5459 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5460 if ((model & MEMMODEL_MASK) != MEMMODEL_RELAXED
5461 && (model & MEMMODEL_MASK) != MEMMODEL_SEQ_CST
5462 && (model & MEMMODEL_MASK) != MEMMODEL_RELEASE)
5463 {
5464 error ("invalid memory model for %<__atomic_store%>");
5465 return NULL_RTX;
5466 }
5467
5468 if (!flag_inline_atomics)
5469 return NULL_RTX;
5470
5471 /* Expand the operands. */
5472 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5473 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5474
5475 return expand_atomic_store (mem, val, model, false);
5476 }
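/* Editor's note: mirroring the checks in the two expanders above, a load
   accepts relaxed, consume, acquire and seq_cst, while a store accepts
   only relaxed, release and seq_cst; e.g.
   __atomic_store_n (&x, 1, __ATOMIC_ACQUIRE) is diagnosed here.  */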
5477
5478 /* Expand the __atomic_fetch_XXX intrinsic:
5479 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5480 EXP is the CALL_EXPR.
5481 TARGET is an optional place for us to store the results.
5482 CODE is the operation: PLUS, MINUS, AND, XOR, IOR, or NOT (NAND).
5483 FETCH_AFTER is true if returning the result of the operation.
5484 FETCH_AFTER is false if returning the value before the operation.
5485 IGNORE is true if the result is not used.
5486 EXT_CALL is the correct builtin for an external call if this cannot be
5487 resolved to an instruction sequence. */
5488
5489 static rtx
5490 expand_builtin_atomic_fetch_op (enum machine_mode mode, tree exp, rtx target,
5491 enum rtx_code code, bool fetch_after,
5492 bool ignore, enum built_in_function ext_call)
5493 {
5494 rtx val, mem, ret;
5495 enum memmodel model;
5496 tree fndecl;
5497 tree addr;
5498
5499 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5500
5501 /* Expand the operands. */
5502 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5503 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5504
5505 /* Only try generating instructions if inlining is turned on. */
5506 if (flag_inline_atomics)
5507 {
5508 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5509 if (ret)
5510 return ret;
5511 }
5512
5513 /* If the library call doesn't need a different routine, we're done. */
5514 if (ext_call == BUILT_IN_NONE)
5515 return NULL_RTX;
5516
5517 /* Change the call to the specified function. */
5518 fndecl = get_callee_fndecl (exp);
5519 addr = CALL_EXPR_FN (exp);
5520 STRIP_NOPS (addr);
5521
5522 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5523 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
5524
5525 /* Expand the call here so we can emit trailing code. */
5526 ret = expand_call (exp, target, ignore);
5527
5528 /* Replace the original function just in case it matters. */
5529 TREE_OPERAND (addr, 0) = fndecl;
5530
5531 /* Then issue the arithmetic correction to return the right result. */
5532 if (!ignore)
5533 {
5534 if (code == NOT)
5535 {
5536 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5537 OPTAB_LIB_WIDEN);
5538 ret = expand_simple_unop (mode, NOT, ret, target, true);
5539 }
5540 else
5541 ret = expand_simple_binop (mode, code, ret, val, target, true,
5542 OPTAB_LIB_WIDEN);
5543 }
5544 return ret;
5545 }
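/* Editor's note: the trailing correction above recovers the OP_fetch
   result from a fetch_OP library call.  For example, __atomic_add_fetch
   expanded via __atomic_fetch_add returns RET + VAL, and the NAND case
   returns ~(RET & VAL), matching the builtin's documented result.  */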
5546
5547
5548 #ifndef HAVE_atomic_clear
5549 # define HAVE_atomic_clear 0
5550 # define gen_atomic_clear(x,y) (gcc_unreachable (), NULL_RTX)
5551 #endif
5552
5553 /* Expand an atomic clear operation.
5554 void __atomic_clear (BOOL *obj, enum memmodel)
5555 EXP is the call expression. */
5556
5557 static rtx
5558 expand_builtin_atomic_clear (tree exp)
5559 {
5560 enum machine_mode mode;
5561 rtx mem, ret;
5562 enum memmodel model;
5563
5564 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5565 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5566 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5567
5568 if ((model & MEMMODEL_MASK) == MEMMODEL_ACQUIRE
5569 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5570 {
5571 error ("invalid memory model for %<__atomic_store%>");
5572 return const0_rtx;
5573 }
5574
5575 if (HAVE_atomic_clear)
5576 {
5577 emit_insn (gen_atomic_clear (mem, model));
5578 return const0_rtx;
5579 }
5580
5581 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
5582 Failing both, emit a plain store. The only way this can
5583 fail is if the bool type is larger than a word size. Unlikely, but
5584 handle it anyway for completeness. Assume a single-threaded model since
5585 there is no atomic support in this case, and no barriers are required. */
5586 ret = expand_atomic_store (mem, const0_rtx, model, true);
5587 if (!ret)
5588 emit_move_insn (mem, const0_rtx);
5589 return const0_rtx;
5590 }
5591
5592 /* Expand an atomic test_and_set operation.
5593 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
5594 EXP is the call expression. */
5595
5596 static rtx
5597 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5598 {
5599 rtx mem;
5600 enum memmodel model;
5601 enum machine_mode mode;
5602
5603 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5604 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5605 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5606
5607 return expand_atomic_test_and_set (target, mem, model);
5608 }
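/* Editor's sketch: a minimal spinlock built from the two builtins above
   (hypothetical user code, not part of GCC):

     static volatile unsigned char lock_byte;

     static void
     acquire (void)
     {
       while (__atomic_test_and_set (&lock_byte, __ATOMIC_ACQUIRE))
         ;                      // spin until the flag was previously clear
     }

     static void
     release (void)
     {
       __atomic_clear (&lock_byte, __ATOMIC_RELEASE);
     }
*/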
5609
5610
5611 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5612 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5613
5614 static tree
5615 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5616 {
5617 int size;
5618 enum machine_mode mode;
5619 unsigned int mode_align, type_align;
5620
5621 if (TREE_CODE (arg0) != INTEGER_CST)
5622 return NULL_TREE;
5623
5624 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5625 mode = mode_for_size (size, MODE_INT, 0);
5626 mode_align = GET_MODE_ALIGNMENT (mode);
5627
5628 if (TREE_CODE (arg1) == INTEGER_CST && INTVAL (expand_normal (arg1)) == 0)
5629 type_align = mode_align;
5630 else
5631 {
5632 tree ttype = TREE_TYPE (arg1);
5633
5634 /* This function is usually invoked and folded immediately by the front
5635 end before anything else has a chance to look at it. The pointer
5636 parameter at this point is usually cast to a void *, so check for that
5637 and look past the cast. */
5638 if (TREE_CODE (arg1) == NOP_EXPR && POINTER_TYPE_P (ttype)
5639 && VOID_TYPE_P (TREE_TYPE (ttype)))
5640 arg1 = TREE_OPERAND (arg1, 0);
5641
5642 ttype = TREE_TYPE (arg1);
5643 gcc_assert (POINTER_TYPE_P (ttype));
5644
5645 /* Get the underlying type of the object. */
5646 ttype = TREE_TYPE (ttype);
5647 type_align = TYPE_ALIGN (ttype);
5648 }
5649
5650 /* If the object has smaller alignment, the lock free routines cannot
5651 be used. */
5652 if (type_align < mode_align)
5653 return boolean_false_node;
5654
5655 /* Check if a compare_and_swap pattern exists for the mode which represents
5656 the required size. The pattern is not allowed to fail, so the existence
5657 of the pattern indicates support is present. */
5658 if (can_compare_and_swap_p (mode, true))
5659 return boolean_true_node;
5660 else
5661 return boolean_false_node;
5662 }
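/* Editor's note: e.g. __atomic_always_lock_free (sizeof (int), 0) is
   folded here at compile time; the constant 0 for the object pointer
   selects the mode-typical alignment branch above.  */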
5663
5664 /* Return true if the parameters to call EXP represent an object which will
5665 always generate lock free instructions. The first argument represents the
5666 size of the object, and the second parameter is a pointer to the object
5667 itself. If NULL is passed for the object, then the result is based on
5668 typical alignment for an object of the specified size. Otherwise return
5669 false. */
5670
5671 static rtx
5672 expand_builtin_atomic_always_lock_free (tree exp)
5673 {
5674 tree size;
5675 tree arg0 = CALL_EXPR_ARG (exp, 0);
5676 tree arg1 = CALL_EXPR_ARG (exp, 1);
5677
5678 if (TREE_CODE (arg0) != INTEGER_CST)
5679 {
5680 error ("non-constant argument 1 to __atomic_always_lock_free");
5681 return const0_rtx;
5682 }
5683
5684 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5685 if (size == boolean_true_node)
5686 return const1_rtx;
5687 return const0_rtx;
5688 }
5689
5690 /* Return boolean_true_node if it can be determined that object ARG1 of
5691 size ARG0 is lock free on this architecture; otherwise NULL_TREE. */
5692
5693 static tree
5694 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5695 {
5696 if (!flag_inline_atomics)
5697 return NULL_TREE;
5698
5699 /* If it isn't always lock free, don't generate a result. */
5700 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5701 return boolean_true_node;
5702
5703 return NULL_TREE;
5704 }
5705
5706 /* Return one if it can be determined that the object described by the
5707 parameters to call EXP is lock free on this architecture. The first
5708 argument is the size of the object, and the second parameter is a
5709 pointer to the object itself. If NULL is passed for the object, then
5710 the result is based on typical alignment for an object of the
5711 specified size. Otherwise return NULL_RTX. */
5712
5713 static rtx
5714 expand_builtin_atomic_is_lock_free (tree exp)
5715 {
5716 tree size;
5717 tree arg0 = CALL_EXPR_ARG (exp, 0);
5718 tree arg1 = CALL_EXPR_ARG (exp, 1);
5719
5720 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5721 {
5722 error ("non-integer argument 1 to __atomic_is_lock_free");
5723 return NULL_RTX;
5724 }
5725
5726 if (!flag_inline_atomics)
5727 return NULL_RTX;
5728
5729 /* If the value is known at compile time, return the RTX for it. */
5730 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5731 if (size == boolean_true_node)
5732 return const1_rtx;
5733
5734 return NULL_RTX;
5735 }
5736
5737 /* Expand the __atomic_thread_fence intrinsic:
5738 void __atomic_thread_fence (enum memmodel)
5739 EXP is the CALL_EXPR. */
5740
5741 static void
5742 expand_builtin_atomic_thread_fence (tree exp)
5743 {
5744 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5745 expand_mem_thread_fence (model);
5746 }
5747
5748 /* Expand the __atomic_signal_fence intrinsic:
5749 void __atomic_signal_fence (enum memmodel)
5750 EXP is the CALL_EXPR. */
5751
5752 static void
5753 expand_builtin_atomic_signal_fence (tree exp)
5754 {
5755 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5756 expand_mem_signal_fence (model);
5757 }
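/* Editor's note: the two fences differ in scope.  A thread fence orders
   memory accesses against other threads and may emit a hardware barrier,
   while a signal fence only restrains the compiler so that accesses are
   ordered with respect to a signal handler running in the same thread;
   it emits no machine barrier.  */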
5758
5759 /* Expand the __sync_synchronize intrinsic. */
5760
5761 static void
5762 expand_builtin_sync_synchronize (void)
5763 {
5764 expand_mem_thread_fence (MEMMODEL_SEQ_CST);
5765 }
5766
5767 static rtx
5768 expand_builtin_thread_pointer (tree exp, rtx target)
5769 {
5770 enum insn_code icode;
5771 if (!validate_arglist (exp, VOID_TYPE))
5772 return const0_rtx;
5773 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5774 if (icode != CODE_FOR_nothing)
5775 {
5776 struct expand_operand op;
5777 if (!REG_P (target) || GET_MODE (target) != Pmode)
5778 target = gen_reg_rtx (Pmode);
5779 create_output_operand (&op, target, Pmode);
5780 expand_insn (icode, 1, &op);
5781 return target;
5782 }
5783 error ("__builtin_thread_pointer is not supported on this target");
5784 return const0_rtx;
5785 }
5786
5787 static void
5788 expand_builtin_set_thread_pointer (tree exp)
5789 {
5790 enum insn_code icode;
5791 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5792 return;
5793 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5794 if (icode != CODE_FOR_nothing)
5795 {
5796 struct expand_operand op;
5797 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5798 Pmode, EXPAND_NORMAL);
5799 create_input_operand (&op, val, Pmode);
5800 expand_insn (icode, 1, &op);
5801 return;
5802 }
5803 error ("__builtin_set_thread_pointer is not supported on this target");
5804 }
5805
5806 \f
5807 /* Expand an expression EXP that calls a built-in function,
5808 with result going to TARGET if that's convenient
5809 (and in mode MODE if that's convenient).
5810 SUBTARGET may be used as the target for computing one of EXP's operands.
5811 IGNORE is nonzero if the value is to be ignored. */
5812
5813 rtx
5814 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5815 int ignore)
5816 {
5817 tree fndecl = get_callee_fndecl (exp);
5818 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5819 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5820 int flags;
5821
5822 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5823 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5824
5825 /* When not optimizing, generate calls to library functions for a certain
5826 set of builtins. */
5827 if (!optimize
5828 && !called_as_built_in (fndecl)
5829 && fcode != BUILT_IN_ALLOCA
5830 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5831 && fcode != BUILT_IN_FREE)
5832 return expand_call (exp, target, ignore);
5833
5834 /* The built-in function expanders test for target == const0_rtx
5835 to determine whether the function's result will be ignored. */
5836 if (ignore)
5837 target = const0_rtx;
5838
5839 /* If the result of a pure or const built-in function is ignored, and
5840 none of its arguments are volatile, we can avoid expanding the
5841 built-in call and just evaluate the arguments for side-effects. */
5842 if (target == const0_rtx
5843 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5844 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5845 {
5846 bool volatilep = false;
5847 tree arg;
5848 call_expr_arg_iterator iter;
5849
5850 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5851 if (TREE_THIS_VOLATILE (arg))
5852 {
5853 volatilep = true;
5854 break;
5855 }
5856
5857 if (! volatilep)
5858 {
5859 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5860 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5861 return const0_rtx;
5862 }
5863 }
5864
5865 switch (fcode)
5866 {
5867 CASE_FLT_FN (BUILT_IN_FABS):
5868 target = expand_builtin_fabs (exp, target, subtarget);
5869 if (target)
5870 return target;
5871 break;
5872
5873 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5874 target = expand_builtin_copysign (exp, target, subtarget);
5875 if (target)
5876 return target;
5877 break;
5878
5879 /* Just do a normal library call if we were unable to fold
5880 the values. */
5881 CASE_FLT_FN (BUILT_IN_CABS):
5882 break;
5883
5884 CASE_FLT_FN (BUILT_IN_EXP):
5885 CASE_FLT_FN (BUILT_IN_EXP10):
5886 CASE_FLT_FN (BUILT_IN_POW10):
5887 CASE_FLT_FN (BUILT_IN_EXP2):
5888 CASE_FLT_FN (BUILT_IN_EXPM1):
5889 CASE_FLT_FN (BUILT_IN_LOGB):
5890 CASE_FLT_FN (BUILT_IN_LOG):
5891 CASE_FLT_FN (BUILT_IN_LOG10):
5892 CASE_FLT_FN (BUILT_IN_LOG2):
5893 CASE_FLT_FN (BUILT_IN_LOG1P):
5894 CASE_FLT_FN (BUILT_IN_TAN):
5895 CASE_FLT_FN (BUILT_IN_ASIN):
5896 CASE_FLT_FN (BUILT_IN_ACOS):
5897 CASE_FLT_FN (BUILT_IN_ATAN):
5898 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5899 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5900 because of possible accuracy problems. */
5901 if (! flag_unsafe_math_optimizations)
5902 break;
5903 CASE_FLT_FN (BUILT_IN_SQRT):
5904 CASE_FLT_FN (BUILT_IN_FLOOR):
5905 CASE_FLT_FN (BUILT_IN_CEIL):
5906 CASE_FLT_FN (BUILT_IN_TRUNC):
5907 CASE_FLT_FN (BUILT_IN_ROUND):
5908 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5909 CASE_FLT_FN (BUILT_IN_RINT):
5910 target = expand_builtin_mathfn (exp, target, subtarget);
5911 if (target)
5912 return target;
5913 break;
5914
5915 CASE_FLT_FN (BUILT_IN_FMA):
5916 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
5917 if (target)
5918 return target;
5919 break;
5920
5921 CASE_FLT_FN (BUILT_IN_ILOGB):
5922 if (! flag_unsafe_math_optimizations)
5923 break;
5924 CASE_FLT_FN (BUILT_IN_ISINF):
5925 CASE_FLT_FN (BUILT_IN_FINITE):
5926 case BUILT_IN_ISFINITE:
5927 case BUILT_IN_ISNORMAL:
5928 target = expand_builtin_interclass_mathfn (exp, target);
5929 if (target)
5930 return target;
5931 break;
5932
5933 CASE_FLT_FN (BUILT_IN_ICEIL):
5934 CASE_FLT_FN (BUILT_IN_LCEIL):
5935 CASE_FLT_FN (BUILT_IN_LLCEIL):
5936 CASE_FLT_FN (BUILT_IN_LFLOOR):
5937 CASE_FLT_FN (BUILT_IN_IFLOOR):
5938 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5939 target = expand_builtin_int_roundingfn (exp, target);
5940 if (target)
5941 return target;
5942 break;
5943
5944 CASE_FLT_FN (BUILT_IN_IRINT):
5945 CASE_FLT_FN (BUILT_IN_LRINT):
5946 CASE_FLT_FN (BUILT_IN_LLRINT):
5947 CASE_FLT_FN (BUILT_IN_IROUND):
5948 CASE_FLT_FN (BUILT_IN_LROUND):
5949 CASE_FLT_FN (BUILT_IN_LLROUND):
5950 target = expand_builtin_int_roundingfn_2 (exp, target);
5951 if (target)
5952 return target;
5953 break;
5954
5955 CASE_FLT_FN (BUILT_IN_POWI):
5956 target = expand_builtin_powi (exp, target);
5957 if (target)
5958 return target;
5959 break;
5960
5961 CASE_FLT_FN (BUILT_IN_ATAN2):
5962 CASE_FLT_FN (BUILT_IN_LDEXP):
5963 CASE_FLT_FN (BUILT_IN_SCALB):
5964 CASE_FLT_FN (BUILT_IN_SCALBN):
5965 CASE_FLT_FN (BUILT_IN_SCALBLN):
5966 if (! flag_unsafe_math_optimizations)
5967 break;
5968
5969 CASE_FLT_FN (BUILT_IN_FMOD):
5970 CASE_FLT_FN (BUILT_IN_REMAINDER):
5971 CASE_FLT_FN (BUILT_IN_DREM):
5972 CASE_FLT_FN (BUILT_IN_POW):
5973 target = expand_builtin_mathfn_2 (exp, target, subtarget);
5974 if (target)
5975 return target;
5976 break;
5977
5978 CASE_FLT_FN (BUILT_IN_CEXPI):
5979 target = expand_builtin_cexpi (exp, target);
5980 gcc_assert (target);
5981 return target;
5982
5983 CASE_FLT_FN (BUILT_IN_SIN):
5984 CASE_FLT_FN (BUILT_IN_COS):
5985 if (! flag_unsafe_math_optimizations)
5986 break;
5987 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5988 if (target)
5989 return target;
5990 break;
5991
5992 CASE_FLT_FN (BUILT_IN_SINCOS):
5993 if (! flag_unsafe_math_optimizations)
5994 break;
5995 target = expand_builtin_sincos (exp);
5996 if (target)
5997 return target;
5998 break;
5999
6000 case BUILT_IN_APPLY_ARGS:
6001 return expand_builtin_apply_args ();
6002
6003 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6004 FUNCTION with a copy of the parameters described by
6005 ARGUMENTS, and ARGSIZE. It returns a block of memory
6006 allocated on the stack into which is stored all the registers
6007 that might possibly be used for returning the result of a
6008 function. ARGUMENTS is the value returned by
6009 __builtin_apply_args. ARGSIZE is the number of bytes of
6010 arguments that must be copied. ??? How should this value be
6011 computed? We'll also need a safe worst case value for varargs
6012 functions. */
6013 case BUILT_IN_APPLY:
6014 if (!validate_arglist (exp, POINTER_TYPE,
6015 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6016 && !validate_arglist (exp, REFERENCE_TYPE,
6017 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6018 return const0_rtx;
6019 else
6020 {
6021 rtx ops[3];
6022
6023 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6024 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6025 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6026
6027 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6028 }
6029
6030 /* __builtin_return (RESULT) causes the function to return the
6031 value described by RESULT. RESULT is address of the block of
6032 memory returned by __builtin_apply. */
6033 case BUILT_IN_RETURN:
6034 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6035 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6036 return const0_rtx;
6037
6038 case BUILT_IN_SAVEREGS:
6039 return expand_builtin_saveregs ();
6040
6041 case BUILT_IN_VA_ARG_PACK:
6042 /* All valid uses of __builtin_va_arg_pack () are removed during
6043 inlining. */
6044 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6045 return const0_rtx;
6046
6047 case BUILT_IN_VA_ARG_PACK_LEN:
6048 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6049 inlining. */
6050 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6051 return const0_rtx;
6052
6053 /* Return the address of the first anonymous stack arg. */
6054 case BUILT_IN_NEXT_ARG:
6055 if (fold_builtin_next_arg (exp, false))
6056 return const0_rtx;
6057 return expand_builtin_next_arg ();
6058
6059 case BUILT_IN_CLEAR_CACHE:
6060 target = expand_builtin___clear_cache (exp);
6061 if (target)
6062 return target;
6063 break;
6064
6065 case BUILT_IN_CLASSIFY_TYPE:
6066 return expand_builtin_classify_type (exp);
6067
6068 case BUILT_IN_CONSTANT_P:
6069 return const0_rtx;
6070
6071 case BUILT_IN_FRAME_ADDRESS:
6072 case BUILT_IN_RETURN_ADDRESS:
6073 return expand_builtin_frame_address (fndecl, exp);
6074
6075 /* Returns the address of the area where the structure is returned.
6076 0 otherwise. */
6077 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6078 if (call_expr_nargs (exp) != 0
6079 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6080 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6081 return const0_rtx;
6082 else
6083 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6084
6085 case BUILT_IN_ALLOCA:
6086 case BUILT_IN_ALLOCA_WITH_ALIGN:
6087 /* If the allocation stems from the declaration of a variable-sized
6088 object, it cannot accumulate. */
6089 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6090 if (target)
6091 return target;
6092 break;
6093
6094 case BUILT_IN_STACK_SAVE:
6095 return expand_stack_save ();
6096
6097 case BUILT_IN_STACK_RESTORE:
6098 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6099 return const0_rtx;
6100
6101 case BUILT_IN_BSWAP16:
6102 case BUILT_IN_BSWAP32:
6103 case BUILT_IN_BSWAP64:
6104 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6105 if (target)
6106 return target;
6107 break;
6108
6109 CASE_INT_FN (BUILT_IN_FFS):
6110 target = expand_builtin_unop (target_mode, exp, target,
6111 subtarget, ffs_optab);
6112 if (target)
6113 return target;
6114 break;
6115
6116 CASE_INT_FN (BUILT_IN_CLZ):
6117 target = expand_builtin_unop (target_mode, exp, target,
6118 subtarget, clz_optab);
6119 if (target)
6120 return target;
6121 break;
6122
6123 CASE_INT_FN (BUILT_IN_CTZ):
6124 target = expand_builtin_unop (target_mode, exp, target,
6125 subtarget, ctz_optab);
6126 if (target)
6127 return target;
6128 break;
6129
6130 CASE_INT_FN (BUILT_IN_CLRSB):
6131 target = expand_builtin_unop (target_mode, exp, target,
6132 subtarget, clrsb_optab);
6133 if (target)
6134 return target;
6135 break;
6136
6137 CASE_INT_FN (BUILT_IN_POPCOUNT):
6138 target = expand_builtin_unop (target_mode, exp, target,
6139 subtarget, popcount_optab);
6140 if (target)
6141 return target;
6142 break;
6143
6144 CASE_INT_FN (BUILT_IN_PARITY):
6145 target = expand_builtin_unop (target_mode, exp, target,
6146 subtarget, parity_optab);
6147 if (target)
6148 return target;
6149 break;
6150
6151 case BUILT_IN_STRLEN:
6152 target = expand_builtin_strlen (exp, target, target_mode);
6153 if (target)
6154 return target;
6155 break;
6156
6157 case BUILT_IN_STRCPY:
6158 target = expand_builtin_strcpy (exp, target);
6159 if (target)
6160 return target;
6161 break;
6162
6163 case BUILT_IN_STRNCPY:
6164 target = expand_builtin_strncpy (exp, target);
6165 if (target)
6166 return target;
6167 break;
6168
6169 case BUILT_IN_STPCPY:
6170 target = expand_builtin_stpcpy (exp, target, mode);
6171 if (target)
6172 return target;
6173 break;
6174
6175 case BUILT_IN_MEMCPY:
6176 target = expand_builtin_memcpy (exp, target);
6177 if (target)
6178 return target;
6179 break;
6180
6181 case BUILT_IN_MEMPCPY:
6182 target = expand_builtin_mempcpy (exp, target, mode);
6183 if (target)
6184 return target;
6185 break;
6186
6187 case BUILT_IN_MEMSET:
6188 target = expand_builtin_memset (exp, target, mode);
6189 if (target)
6190 return target;
6191 break;
6192
6193 case BUILT_IN_BZERO:
6194 target = expand_builtin_bzero (exp);
6195 if (target)
6196 return target;
6197 break;
6198
6199 case BUILT_IN_STRCMP:
6200 target = expand_builtin_strcmp (exp, target);
6201 if (target)
6202 return target;
6203 break;
6204
6205 case BUILT_IN_STRNCMP:
6206 target = expand_builtin_strncmp (exp, target, mode);
6207 if (target)
6208 return target;
6209 break;
6210
6211 case BUILT_IN_BCMP:
6212 case BUILT_IN_MEMCMP:
6213 target = expand_builtin_memcmp (exp, target, mode);
6214 if (target)
6215 return target;
6216 break;
6217
6218 case BUILT_IN_SETJMP:
6219 /* This should have been lowered to the builtins below. */
6220 gcc_unreachable ();
6221
6222 case BUILT_IN_SETJMP_SETUP:
6223 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6224 and the receiver label. */
6225 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6226 {
6227 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6228 VOIDmode, EXPAND_NORMAL);
6229 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6230 rtx label_r = label_rtx (label);
6231
6232 /* This is copied from the handling of non-local gotos. */
6233 expand_builtin_setjmp_setup (buf_addr, label_r);
6234 nonlocal_goto_handler_labels
6235 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6236 nonlocal_goto_handler_labels);
6237 /* ??? Do not let expand_label treat us as such since we would
6238 not want to be both on the list of non-local labels and on
6239 the list of forced labels. */
6240 FORCED_LABEL (label) = 0;
6241 return const0_rtx;
6242 }
6243 break;
6244
6245 case BUILT_IN_SETJMP_DISPATCHER:
6246 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6247 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6248 {
6249 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6250 rtx label_r = label_rtx (label);
6251
6252 /* Remove the dispatcher label from the list of non-local labels
6253 since the receiver labels have been added to it above. */
6254 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6255 return const0_rtx;
6256 }
6257 break;
6258
6259 case BUILT_IN_SETJMP_RECEIVER:
6260 /* __builtin_setjmp_receiver is passed the receiver label. */
6261 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6262 {
6263 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6264 rtx label_r = label_rtx (label);
6265
6266 expand_builtin_setjmp_receiver (label_r);
6267 return const0_rtx;
6268 }
6269 break;
6270
6271 /* __builtin_longjmp is passed a pointer to an array of five words.
6272 It's similar to the C library longjmp function but works with
6273 __builtin_setjmp above. */
6274 case BUILT_IN_LONGJMP:
6275 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6276 {
6277 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6278 VOIDmode, EXPAND_NORMAL);
6279 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6280
6281 if (value != const1_rtx)
6282 {
6283 error ("%<__builtin_longjmp%> second argument must be 1");
6284 return const0_rtx;
6285 }
6286
6287 expand_builtin_longjmp (buf_addr, value);
6288 return const0_rtx;
6289 }
6290 break;
6291
6292 case BUILT_IN_NONLOCAL_GOTO:
6293 target = expand_builtin_nonlocal_goto (exp);
6294 if (target)
6295 return target;
6296 break;
6297
6298 /* This updates the setjmp buffer that is its argument with the value
6299 of the current stack pointer. */
6300 case BUILT_IN_UPDATE_SETJMP_BUF:
6301 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6302 {
6303 rtx buf_addr
6304 = expand_normal (CALL_EXPR_ARG (exp, 0));
6305
6306 expand_builtin_update_setjmp_buf (buf_addr);
6307 return const0_rtx;
6308 }
6309 break;
6310
6311 case BUILT_IN_TRAP:
6312 expand_builtin_trap ();
6313 return const0_rtx;
6314
6315 case BUILT_IN_UNREACHABLE:
6316 expand_builtin_unreachable ();
6317 return const0_rtx;
6318
6319 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6320 case BUILT_IN_SIGNBITD32:
6321 case BUILT_IN_SIGNBITD64:
6322 case BUILT_IN_SIGNBITD128:
6323 target = expand_builtin_signbit (exp, target);
6324 if (target)
6325 return target;
6326 break;
6327
6328 /* Various hooks for the DWARF 2 __throw routine. */
6329 case BUILT_IN_UNWIND_INIT:
6330 expand_builtin_unwind_init ();
6331 return const0_rtx;
6332 case BUILT_IN_DWARF_CFA:
6333 return virtual_cfa_rtx;
6334 #ifdef DWARF2_UNWIND_INFO
6335 case BUILT_IN_DWARF_SP_COLUMN:
6336 return expand_builtin_dwarf_sp_column ();
6337 case BUILT_IN_INIT_DWARF_REG_SIZES:
6338 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6339 return const0_rtx;
6340 #endif
6341 case BUILT_IN_FROB_RETURN_ADDR:
6342 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6343 case BUILT_IN_EXTRACT_RETURN_ADDR:
6344 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6345 case BUILT_IN_EH_RETURN:
6346 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6347 CALL_EXPR_ARG (exp, 1));
6348 return const0_rtx;
6349 #ifdef EH_RETURN_DATA_REGNO
6350 case BUILT_IN_EH_RETURN_DATA_REGNO:
6351 return expand_builtin_eh_return_data_regno (exp);
6352 #endif
6353 case BUILT_IN_EXTEND_POINTER:
6354 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6355 case BUILT_IN_EH_POINTER:
6356 return expand_builtin_eh_pointer (exp);
6357 case BUILT_IN_EH_FILTER:
6358 return expand_builtin_eh_filter (exp);
6359 case BUILT_IN_EH_COPY_VALUES:
6360 return expand_builtin_eh_copy_values (exp);
6361
6362 case BUILT_IN_VA_START:
6363 return expand_builtin_va_start (exp);
6364 case BUILT_IN_VA_END:
6365 return expand_builtin_va_end (exp);
6366 case BUILT_IN_VA_COPY:
6367 return expand_builtin_va_copy (exp);
6368 case BUILT_IN_EXPECT:
6369 return expand_builtin_expect (exp, target);
6370 case BUILT_IN_ASSUME_ALIGNED:
6371 return expand_builtin_assume_aligned (exp, target);
6372 case BUILT_IN_PREFETCH:
6373 expand_builtin_prefetch (exp);
6374 return const0_rtx;
6375
6376 case BUILT_IN_INIT_TRAMPOLINE:
6377 return expand_builtin_init_trampoline (exp, true);
6378 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6379 return expand_builtin_init_trampoline (exp, false);
6380 case BUILT_IN_ADJUST_TRAMPOLINE:
6381 return expand_builtin_adjust_trampoline (exp);
6382
6383 case BUILT_IN_FORK:
6384 case BUILT_IN_EXECL:
6385 case BUILT_IN_EXECV:
6386 case BUILT_IN_EXECLP:
6387 case BUILT_IN_EXECLE:
6388 case BUILT_IN_EXECVP:
6389 case BUILT_IN_EXECVE:
6390 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6391 if (target)
6392 return target;
6393 break;
6394
6395 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6396 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6397 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6398 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6399 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6400 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6401 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6402 if (target)
6403 return target;
6404 break;
6405
6406 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6407 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6408 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6409 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6410 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6411 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6412 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6413 if (target)
6414 return target;
6415 break;
6416
6417 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6418 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6419 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6420 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6421 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6422 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6423 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6424 if (target)
6425 return target;
6426 break;
6427
6428 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6429 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6430 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6431 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6432 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6433 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6434 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6435 if (target)
6436 return target;
6437 break;
6438
6439 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6440 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6441 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6442 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6443 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6444 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6445 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6446 if (target)
6447 return target;
6448 break;
6449
6450 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6451 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6452 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6453 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6454 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6455 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6456 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6457 if (target)
6458 return target;
6459 break;
6460
6461 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6462 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6463 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6464 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6465 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6466 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6467 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6468 if (target)
6469 return target;
6470 break;
6471
6472 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6473 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6474 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6475 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6476 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6477 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6478 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6479 if (target)
6480 return target;
6481 break;
6482
6483 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6484 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6485 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6486 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6487 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6488 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6489 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6490 if (target)
6491 return target;
6492 break;
6493
6494 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6495 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6496 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6497 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6498 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6499 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6500 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6501 if (target)
6502 return target;
6503 break;
6504
6505 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6506 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6507 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6508 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6509 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6510 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6511 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6512 if (target)
6513 return target;
6514 break;
6515
6516 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6517 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6518 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6519 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6520 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6521 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6522 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6523 if (target)
6524 return target;
6525 break;
6526
6527 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6528 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6529 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6530 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6531 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6532 if (mode == VOIDmode)
6533 mode = TYPE_MODE (boolean_type_node);
6534 if (!target || !register_operand (target, mode))
6535 target = gen_reg_rtx (mode);
6536
6537 mode = get_builtin_sync_mode
6538 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6539 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6540 if (target)
6541 return target;
6542 break;
6543
6544 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6545 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6546 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6547 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6548 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6549 mode = get_builtin_sync_mode
6550 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6551 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6552 if (target)
6553 return target;
6554 break;
6555
6556 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6557 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6558 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6559 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6560 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6561 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6562 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6563 if (target)
6564 return target;
6565 break;
6566
6567 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6568 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6569 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6570 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6571 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6572 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6573 expand_builtin_sync_lock_release (mode, exp);
6574 return const0_rtx;
6575
6576 case BUILT_IN_SYNC_SYNCHRONIZE:
6577 expand_builtin_sync_synchronize ();
6578 return const0_rtx;
6579
6580 case BUILT_IN_ATOMIC_EXCHANGE_1:
6581 case BUILT_IN_ATOMIC_EXCHANGE_2:
6582 case BUILT_IN_ATOMIC_EXCHANGE_4:
6583 case BUILT_IN_ATOMIC_EXCHANGE_8:
6584 case BUILT_IN_ATOMIC_EXCHANGE_16:
6585 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6586 target = expand_builtin_atomic_exchange (mode, exp, target);
6587 if (target)
6588 return target;
6589 break;
6590
6591 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6592 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6593 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6594 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6595 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6596 {
6597 unsigned int nargs, z;
6598 vec<tree, va_gc> *vec;
6599
6600 mode =
6601 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6602 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6603 if (target)
6604 return target;
6605
6606 /* If this is turned into an external library call, the weak parameter
6607 must be dropped to match the expected parameter list. */
6608 nargs = call_expr_nargs (exp);
6609 vec_alloc (vec, nargs - 1);
6610 for (z = 0; z < 3; z++)
6611 vec->quick_push (CALL_EXPR_ARG (exp, z));
6612 /* Skip the boolean weak parameter. */
6613 for (z = 4; z < 6; z++)
6614 vec->quick_push (CALL_EXPR_ARG (exp, z));
6615 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6616 break;
6617 }
6618
6619 case BUILT_IN_ATOMIC_LOAD_1:
6620 case BUILT_IN_ATOMIC_LOAD_2:
6621 case BUILT_IN_ATOMIC_LOAD_4:
6622 case BUILT_IN_ATOMIC_LOAD_8:
6623 case BUILT_IN_ATOMIC_LOAD_16:
6624 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6625 target = expand_builtin_atomic_load (mode, exp, target);
6626 if (target)
6627 return target;
6628 break;
6629
6630 case BUILT_IN_ATOMIC_STORE_1:
6631 case BUILT_IN_ATOMIC_STORE_2:
6632 case BUILT_IN_ATOMIC_STORE_4:
6633 case BUILT_IN_ATOMIC_STORE_8:
6634 case BUILT_IN_ATOMIC_STORE_16:
6635 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6636 target = expand_builtin_atomic_store (mode, exp);
6637 if (target)
6638 return const0_rtx;
6639 break;
6640
6641 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6642 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6643 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6644 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6645 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6646 {
6647 enum built_in_function lib;
6648 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6649 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6650 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6651 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6652 ignore, lib);
6653 if (target)
6654 return target;
6655 break;
6656 }
6657 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6658 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6659 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6660 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6661 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6662 {
6663 enum built_in_function lib;
6664 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6665 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6666 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6667 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6668 ignore, lib);
6669 if (target)
6670 return target;
6671 break;
6672 }
6673 case BUILT_IN_ATOMIC_AND_FETCH_1:
6674 case BUILT_IN_ATOMIC_AND_FETCH_2:
6675 case BUILT_IN_ATOMIC_AND_FETCH_4:
6676 case BUILT_IN_ATOMIC_AND_FETCH_8:
6677 case BUILT_IN_ATOMIC_AND_FETCH_16:
6678 {
6679 enum built_in_function lib;
6680 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6681 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6682 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6683 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6684 ignore, lib);
6685 if (target)
6686 return target;
6687 break;
6688 }
6689 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6690 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6691 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6692 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6693 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6694 {
6695 enum built_in_function lib;
6696 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6697 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6698 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6699 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6700 ignore, lib);
6701 if (target)
6702 return target;
6703 break;
6704 }
6705 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6706 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6707 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6708 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6709 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6710 {
6711 enum built_in_function lib;
6712 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6713 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6714 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6715 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6716 ignore, lib);
6717 if (target)
6718 return target;
6719 break;
6720 }
6721 case BUILT_IN_ATOMIC_OR_FETCH_1:
6722 case BUILT_IN_ATOMIC_OR_FETCH_2:
6723 case BUILT_IN_ATOMIC_OR_FETCH_4:
6724 case BUILT_IN_ATOMIC_OR_FETCH_8:
6725 case BUILT_IN_ATOMIC_OR_FETCH_16:
6726 {
6727 enum built_in_function lib;
6728 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6729 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6730 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6731 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6732 ignore, lib);
6733 if (target)
6734 return target;
6735 break;
6736 }
6737 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6738 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6739 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6740 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6741 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6742 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6743 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6744 ignore, BUILT_IN_NONE);
6745 if (target)
6746 return target;
6747 break;
6748
6749 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6750 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6751 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6752 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6753 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6754 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6755 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6756 ignore, BUILT_IN_NONE);
6757 if (target)
6758 return target;
6759 break;
6760
6761 case BUILT_IN_ATOMIC_FETCH_AND_1:
6762 case BUILT_IN_ATOMIC_FETCH_AND_2:
6763 case BUILT_IN_ATOMIC_FETCH_AND_4:
6764 case BUILT_IN_ATOMIC_FETCH_AND_8:
6765 case BUILT_IN_ATOMIC_FETCH_AND_16:
6766 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6767 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6768 ignore, BUILT_IN_NONE);
6769 if (target)
6770 return target;
6771 break;
6772
6773 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6774 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6775 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6776 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6777 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6778 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6779 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6780 ignore, BUILT_IN_NONE);
6781 if (target)
6782 return target;
6783 break;
6784
6785 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6786 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6787 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6788 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6789 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6790 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6791 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6792 ignore, BUILT_IN_NONE);
6793 if (target)
6794 return target;
6795 break;
6796
6797 case BUILT_IN_ATOMIC_FETCH_OR_1:
6798 case BUILT_IN_ATOMIC_FETCH_OR_2:
6799 case BUILT_IN_ATOMIC_FETCH_OR_4:
6800 case BUILT_IN_ATOMIC_FETCH_OR_8:
6801 case BUILT_IN_ATOMIC_FETCH_OR_16:
6802 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6803 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6804 ignore, BUILT_IN_NONE);
6805 if (target)
6806 return target;
6807 break;
6808
6809 case BUILT_IN_ATOMIC_TEST_AND_SET:
6810 return expand_builtin_atomic_test_and_set (exp, target);
6811
6812 case BUILT_IN_ATOMIC_CLEAR:
6813 return expand_builtin_atomic_clear (exp);
6814
6815 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6816 return expand_builtin_atomic_always_lock_free (exp);
6817
6818 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6819 target = expand_builtin_atomic_is_lock_free (exp);
6820 if (target)
6821 return target;
6822 break;
6823
6824 case BUILT_IN_ATOMIC_THREAD_FENCE:
6825 expand_builtin_atomic_thread_fence (exp);
6826 return const0_rtx;
6827
6828 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6829 expand_builtin_atomic_signal_fence (exp);
6830 return const0_rtx;
6831
6832 case BUILT_IN_OBJECT_SIZE:
6833 return expand_builtin_object_size (exp);
6834
6835 case BUILT_IN_MEMCPY_CHK:
6836 case BUILT_IN_MEMPCPY_CHK:
6837 case BUILT_IN_MEMMOVE_CHK:
6838 case BUILT_IN_MEMSET_CHK:
6839 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6840 if (target)
6841 return target;
6842 break;
6843
6844 case BUILT_IN_STRCPY_CHK:
6845 case BUILT_IN_STPCPY_CHK:
6846 case BUILT_IN_STRNCPY_CHK:
6847 case BUILT_IN_STPNCPY_CHK:
6848 case BUILT_IN_STRCAT_CHK:
6849 case BUILT_IN_STRNCAT_CHK:
6850 case BUILT_IN_SNPRINTF_CHK:
6851 case BUILT_IN_VSNPRINTF_CHK:
6852 maybe_emit_chk_warning (exp, fcode);
6853 break;
6854
6855 case BUILT_IN_SPRINTF_CHK:
6856 case BUILT_IN_VSPRINTF_CHK:
6857 maybe_emit_sprintf_chk_warning (exp, fcode);
6858 break;
6859
6860 case BUILT_IN_FREE:
6861 if (warn_free_nonheap_object)
6862 maybe_emit_free_warning (exp);
6863 break;
6864
6865 case BUILT_IN_THREAD_POINTER:
6866 return expand_builtin_thread_pointer (exp, target);
6867
6868 case BUILT_IN_SET_THREAD_POINTER:
6869 expand_builtin_set_thread_pointer (exp);
6870 return const0_rtx;
6871
6872     default:	/* just do a library call for an unknown builtin */
6873 break;
6874 }
6875
6876 /* The switch statement above can drop through to cause the function
6877 to be called normally. */
6878 return expand_call (exp, target, ignore);
6879 }
6880
6881 /* Determine whether a tree node represents a call to a built-in
6882 function. If the tree T is a call to a built-in function with
6883 the right number of arguments of the appropriate types, return
6884 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6885 Otherwise the return value is END_BUILTINS. */
6886
6887 enum built_in_function
6888 builtin_mathfn_code (const_tree t)
6889 {
6890 const_tree fndecl, arg, parmlist;
6891 const_tree argtype, parmtype;
6892 const_call_expr_arg_iterator iter;
6893
6894 if (TREE_CODE (t) != CALL_EXPR
6895 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6896 return END_BUILTINS;
6897
6898 fndecl = get_callee_fndecl (t);
6899 if (fndecl == NULL_TREE
6900 || TREE_CODE (fndecl) != FUNCTION_DECL
6901 || ! DECL_BUILT_IN (fndecl)
6902 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6903 return END_BUILTINS;
6904
6905 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6906 init_const_call_expr_arg_iterator (t, &iter);
6907 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6908 {
6909 /* If a function doesn't take a variable number of arguments,
6910 the last element in the list will have type `void'. */
6911 parmtype = TREE_VALUE (parmlist);
6912 if (VOID_TYPE_P (parmtype))
6913 {
6914 if (more_const_call_expr_args_p (&iter))
6915 return END_BUILTINS;
6916 return DECL_FUNCTION_CODE (fndecl);
6917 }
6918
6919 if (! more_const_call_expr_args_p (&iter))
6920 return END_BUILTINS;
6921
6922 arg = next_const_call_expr_arg (&iter);
6923 argtype = TREE_TYPE (arg);
6924
6925 if (SCALAR_FLOAT_TYPE_P (parmtype))
6926 {
6927 if (! SCALAR_FLOAT_TYPE_P (argtype))
6928 return END_BUILTINS;
6929 }
6930 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6931 {
6932 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6933 return END_BUILTINS;
6934 }
6935 else if (POINTER_TYPE_P (parmtype))
6936 {
6937 if (! POINTER_TYPE_P (argtype))
6938 return END_BUILTINS;
6939 }
6940 else if (INTEGRAL_TYPE_P (parmtype))
6941 {
6942 if (! INTEGRAL_TYPE_P (argtype))
6943 return END_BUILTINS;
6944 }
6945 else
6946 return END_BUILTINS;
6947 }
6948
6949 /* Variable-length argument list. */
6950 return DECL_FUNCTION_CODE (fndecl);
6951 }
6952
6953 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6954 evaluate to a constant. */
6955
6956 static tree
6957 fold_builtin_constant_p (tree arg)
6958 {
6959 /* We return 1 for a numeric type that's known to be a constant
6960 value at compile-time or for an aggregate type that's a
6961 literal constant. */
6962 STRIP_NOPS (arg);
6963
6964   /* If we know this is a constant, return the constant one.  */
6965 if (CONSTANT_CLASS_P (arg)
6966 || (TREE_CODE (arg) == CONSTRUCTOR
6967 && TREE_CONSTANT (arg)))
6968 return integer_one_node;
6969 if (TREE_CODE (arg) == ADDR_EXPR)
6970 {
6971 tree op = TREE_OPERAND (arg, 0);
6972 if (TREE_CODE (op) == STRING_CST
6973 || (TREE_CODE (op) == ARRAY_REF
6974 && integer_zerop (TREE_OPERAND (op, 1))
6975 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6976 return integer_one_node;
6977 }
6978
6979 /* If this expression has side effects, show we don't know it to be a
6980      constant.  Likewise if it's a pointer or aggregate type, since in
6981      those cases we only want literals; those are only optimized
6982      when generating RTL, not later.
6983 And finally, if we are compiling an initializer, not code, we
6984 need to return a definite result now; there's not going to be any
6985 more optimization done. */
6986 if (TREE_SIDE_EFFECTS (arg)
6987 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
6988 || POINTER_TYPE_P (TREE_TYPE (arg))
6989 || cfun == 0
6990 || folding_initializer
6991 || force_folding_builtin_constant_p)
6992 return integer_zero_node;
6993
6994 return NULL_TREE;
6995 }
6996
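/* For illustration, the folding above behaves like this on C input
   (a sketch; the deferred cases return NULL_TREE so later passes can
   retry):

       __builtin_constant_p (42)     -> 1   constant class node
       __builtin_constant_p ("abc")  -> 1   address of a STRING_CST
       __builtin_constant_p (x++)    -> 0   side effects
       __builtin_constant_p (x)      -> deferred, unless folding an
                                        initializer, where the definite
                                        answer 0 is given.  */
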
6997 /* Create builtin_expect with PRED and EXPECTED as its arguments and
6998 return it as a truthvalue. */
6999
7000 static tree
7001 build_builtin_expect_predicate (location_t loc, tree pred, tree expected)
7002 {
7003 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7004
7005 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7006 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7007 ret_type = TREE_TYPE (TREE_TYPE (fn));
7008 pred_type = TREE_VALUE (arg_types);
7009 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7010
7011 pred = fold_convert_loc (loc, pred_type, pred);
7012 expected = fold_convert_loc (loc, expected_type, expected);
7013 call_expr = build_call_expr_loc (loc, fn, 2, pred, expected);
7014
7015 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7016 build_int_cst (ret_type, 0));
7017 }
7018
7019 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7020 NULL_TREE if no simplification is possible. */
7021
7022 static tree
7023 fold_builtin_expect (location_t loc, tree arg0, tree arg1)
7024 {
7025 tree inner, fndecl, inner_arg0;
7026 enum tree_code code;
7027
7028 /* Distribute the expected value over short-circuiting operators.
7029 See through the cast from truthvalue_type_node to long. */
7030 inner_arg0 = arg0;
7031 while (TREE_CODE (inner_arg0) == NOP_EXPR
7032 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7033 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7034 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7035
7036 /* If this is a builtin_expect within a builtin_expect keep the
7037 inner one. See through a comparison against a constant. It
7038      might have been added to create a truthvalue.  */
7039 inner = inner_arg0;
7040
7041 if (COMPARISON_CLASS_P (inner)
7042 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7043 inner = TREE_OPERAND (inner, 0);
7044
7045 if (TREE_CODE (inner) == CALL_EXPR
7046 && (fndecl = get_callee_fndecl (inner))
7047 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7048 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7049 return arg0;
7050
7051 inner = inner_arg0;
7052 code = TREE_CODE (inner);
7053 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7054 {
7055 tree op0 = TREE_OPERAND (inner, 0);
7056 tree op1 = TREE_OPERAND (inner, 1);
7057
7058 op0 = build_builtin_expect_predicate (loc, op0, arg1);
7059 op1 = build_builtin_expect_predicate (loc, op1, arg1);
7060 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7061
7062 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7063 }
7064
7065 /* If the argument isn't invariant then there's nothing else we can do. */
7066 if (!TREE_CONSTANT (inner_arg0))
7067 return NULL_TREE;
7068
7069 /* If we expect that a comparison against the argument will fold to
7070      a constant, return the constant.  In practice, this means a true
7071 constant or the address of a non-weak symbol. */
7072 inner = inner_arg0;
7073 STRIP_NOPS (inner);
7074 if (TREE_CODE (inner) == ADDR_EXPR)
7075 {
7076 do
7077 {
7078 inner = TREE_OPERAND (inner, 0);
7079 }
7080 while (TREE_CODE (inner) == COMPONENT_REF
7081 || TREE_CODE (inner) == ARRAY_REF);
7082 if ((TREE_CODE (inner) == VAR_DECL
7083 || TREE_CODE (inner) == FUNCTION_DECL)
7084 && DECL_WEAK (inner))
7085 return NULL_TREE;
7086 }
7087
7088 /* Otherwise, ARG0 already has the proper type for the return value. */
7089 return arg0;
7090 }
7091
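/* For illustration, the short-circuit distribution above turns

       __builtin_expect (a && b, 1)

   into the equivalent of

       (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)

   so that each arm of the TRUTH_ANDIF_EXPR carries the expectation.  */
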
7092 /* Fold a call to __builtin_classify_type with argument ARG. */
7093
7094 static tree
7095 fold_builtin_classify_type (tree arg)
7096 {
7097 if (arg == 0)
7098 return build_int_cst (integer_type_node, no_type_class);
7099
7100 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7101 }
7102
7103 /* Fold a call to __builtin_strlen with argument ARG. */
7104
7105 static tree
7106 fold_builtin_strlen (location_t loc, tree type, tree arg)
7107 {
7108 if (!validate_arg (arg, POINTER_TYPE))
7109 return NULL_TREE;
7110 else
7111 {
7112 tree len = c_strlen (arg, 0);
7113
7114 if (len)
7115 return fold_convert_loc (loc, type, len);
7116
7117 return NULL_TREE;
7118 }
7119 }
7120
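/* For illustration: with a constant string argument, c_strlen computes
   the length at compile time, so

       strlen ("hello")

   folds to the constant 5 (converted to the return type), while a
   non-constant argument yields NULL_TREE and the call is kept.  */
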
7121 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7122
7123 static tree
7124 fold_builtin_inf (location_t loc, tree type, int warn)
7125 {
7126 REAL_VALUE_TYPE real;
7127
7128 /* __builtin_inff is intended to be usable to define INFINITY on all
7129 targets. If an infinity is not available, INFINITY expands "to a
7130 positive constant of type float that overflows at translation
7131 time", footnote "In this case, using INFINITY will violate the
7132 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7133 Thus we pedwarn to ensure this constraint violation is
7134 diagnosed. */
7135 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7136 pedwarn (loc, 0, "target format does not support infinity");
7137
7138 real_inf (&real);
7139 return build_real (type, real);
7140 }
7141
7142 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7143
7144 static tree
7145 fold_builtin_nan (tree arg, tree type, int quiet)
7146 {
7147 REAL_VALUE_TYPE real;
7148 const char *str;
7149
7150 if (!validate_arg (arg, POINTER_TYPE))
7151 return NULL_TREE;
7152 str = c_getstr (arg);
7153 if (!str)
7154 return NULL_TREE;
7155
7156 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7157 return NULL_TREE;
7158
7159 return build_real (type, real);
7160 }
7161
7162 /* Return true if the floating point expression T has an integer value.
7163 We also allow +Inf, -Inf and NaN to be considered integer values. */
7164
7165 static bool
7166 integer_valued_real_p (tree t)
7167 {
7168 switch (TREE_CODE (t))
7169 {
7170 case FLOAT_EXPR:
7171 return true;
7172
7173 case ABS_EXPR:
7174 case SAVE_EXPR:
7175 return integer_valued_real_p (TREE_OPERAND (t, 0));
7176
7177 case COMPOUND_EXPR:
7178 case MODIFY_EXPR:
7179 case BIND_EXPR:
7180 return integer_valued_real_p (TREE_OPERAND (t, 1));
7181
7182 case PLUS_EXPR:
7183 case MINUS_EXPR:
7184 case MULT_EXPR:
7185 case MIN_EXPR:
7186 case MAX_EXPR:
7187 return integer_valued_real_p (TREE_OPERAND (t, 0))
7188 && integer_valued_real_p (TREE_OPERAND (t, 1));
7189
7190 case COND_EXPR:
7191 return integer_valued_real_p (TREE_OPERAND (t, 1))
7192 && integer_valued_real_p (TREE_OPERAND (t, 2));
7193
7194 case REAL_CST:
7195 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7196
7197 case NOP_EXPR:
7198 {
7199 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7200 if (TREE_CODE (type) == INTEGER_TYPE)
7201 return true;
7202 if (TREE_CODE (type) == REAL_TYPE)
7203 return integer_valued_real_p (TREE_OPERAND (t, 0));
7204 break;
7205 }
7206
7207 case CALL_EXPR:
7208 switch (builtin_mathfn_code (t))
7209 {
7210 CASE_FLT_FN (BUILT_IN_CEIL):
7211 CASE_FLT_FN (BUILT_IN_FLOOR):
7212 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7213 CASE_FLT_FN (BUILT_IN_RINT):
7214 CASE_FLT_FN (BUILT_IN_ROUND):
7215 CASE_FLT_FN (BUILT_IN_TRUNC):
7216 return true;
7217
7218 CASE_FLT_FN (BUILT_IN_FMIN):
7219 CASE_FLT_FN (BUILT_IN_FMAX):
7220 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7221 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7222
7223 default:
7224 break;
7225 }
7226 break;
7227
7228 default:
7229 break;
7230 }
7231 return false;
7232 }
7233
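/* For illustration, given int i and double x, the predicate above
   gives:

       (double) i        -> true   (FLOAT_EXPR)
       floor (x)         -> true   (integer rounding builtin)
       fabs (floor (x))  -> true   (ABS_EXPR of an integer value)
       x + 0.5           -> false  (not provably integer valued)  */
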
7234 /* FNDECL is assumed to be a builtin where truncation can be propagated
7235    across (for instance floor ((double)f) == (double)floorf (f)).
7236 Do the transformation for a call with argument ARG. */
7237
7238 static tree
7239 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
7240 {
7241 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7242
7243 if (!validate_arg (arg, REAL_TYPE))
7244 return NULL_TREE;
7245
7246 /* Integer rounding functions are idempotent. */
7247 if (fcode == builtin_mathfn_code (arg))
7248 return arg;
7249
7250 /* If argument is already integer valued, and we don't need to worry
7251 about setting errno, there's no need to perform rounding. */
7252 if (! flag_errno_math && integer_valued_real_p (arg))
7253 return arg;
7254
7255 if (optimize)
7256 {
7257 tree arg0 = strip_float_extensions (arg);
7258 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7259 tree newtype = TREE_TYPE (arg0);
7260 tree decl;
7261
7262 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7263 && (decl = mathfn_built_in (newtype, fcode)))
7264 return fold_convert_loc (loc, ftype,
7265 build_call_expr_loc (loc, decl, 1,
7266 fold_convert_loc (loc,
7267 newtype,
7268 arg0)));
7269 }
7270 return NULL_TREE;
7271 }
7272
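/* For illustration, with float f and optimization enabled, the
   narrowing above rewrites

       floor ((double) f)

   into the equivalent of

       (double) floorf (f)

   which is safe because rounding in the narrower type and widening
   the result yields the same value.  */
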
7273 /* FNDECL is assumed to be a builtin which can narrow the FP type of
7274 the argument, for instance lround((double)f) -> lroundf (f).
7275 Do the transformation for a call with argument ARG. */
7276
7277 static tree
7278 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
7279 {
7280 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7281
7282 if (!validate_arg (arg, REAL_TYPE))
7283 return NULL_TREE;
7284
7285 /* If argument is already integer valued, and we don't need to worry
7286 about setting errno, there's no need to perform rounding. */
7287 if (! flag_errno_math && integer_valued_real_p (arg))
7288 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7289 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7290
7291 if (optimize)
7292 {
7293 tree ftype = TREE_TYPE (arg);
7294 tree arg0 = strip_float_extensions (arg);
7295 tree newtype = TREE_TYPE (arg0);
7296 tree decl;
7297
7298 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7299 && (decl = mathfn_built_in (newtype, fcode)))
7300 return build_call_expr_loc (loc, decl, 1,
7301 fold_convert_loc (loc, newtype, arg0));
7302 }
7303
7304 /* Canonicalize iround (x) to lround (x) on ILP32 targets where
7305 sizeof (int) == sizeof (long). */
7306 if (TYPE_PRECISION (integer_type_node)
7307 == TYPE_PRECISION (long_integer_type_node))
7308 {
7309 tree newfn = NULL_TREE;
7310 switch (fcode)
7311 {
7312 CASE_FLT_FN (BUILT_IN_ICEIL):
7313 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7314 break;
7315
7316 CASE_FLT_FN (BUILT_IN_IFLOOR):
7317 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7318 break;
7319
7320 CASE_FLT_FN (BUILT_IN_IROUND):
7321 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7322 break;
7323
7324 CASE_FLT_FN (BUILT_IN_IRINT):
7325 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7326 break;
7327
7328 default:
7329 break;
7330 }
7331
7332 if (newfn)
7333 {
7334 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7335 return fold_convert_loc (loc,
7336 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7337 }
7338 }
7339
7340 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7341 sizeof (long long) == sizeof (long). */
7342 if (TYPE_PRECISION (long_long_integer_type_node)
7343 == TYPE_PRECISION (long_integer_type_node))
7344 {
7345 tree newfn = NULL_TREE;
7346 switch (fcode)
7347 {
7348 CASE_FLT_FN (BUILT_IN_LLCEIL):
7349 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7350 break;
7351
7352 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7353 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7354 break;
7355
7356 CASE_FLT_FN (BUILT_IN_LLROUND):
7357 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7358 break;
7359
7360 CASE_FLT_FN (BUILT_IN_LLRINT):
7361 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7362 break;
7363
7364 default:
7365 break;
7366 }
7367
7368 if (newfn)
7369 {
7370 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7371 return fold_convert_loc (loc,
7372 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7373 }
7374 }
7375
7376 return NULL_TREE;
7377 }
7378
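/* For illustration, with float f and double x, the folds above give:

       lround ((double) f)  ->  lroundf (f)
       llround (x)          ->  lround (x)  when long long and long
                                            have the same precision
                                            (e.g. LP64)
       iround (x)           ->  lround (x)  when int and long have the
                                            same precision (e.g. ILP32)  */
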
7379 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7380 return type. Return NULL_TREE if no simplification can be made. */
7381
7382 static tree
7383 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
7384 {
7385 tree res;
7386
7387 if (!validate_arg (arg, COMPLEX_TYPE)
7388 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7389 return NULL_TREE;
7390
7391 /* Calculate the result when the argument is a constant. */
7392 if (TREE_CODE (arg) == COMPLEX_CST
7393 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7394 type, mpfr_hypot)))
7395 return res;
7396
7397 if (TREE_CODE (arg) == COMPLEX_EXPR)
7398 {
7399 tree real = TREE_OPERAND (arg, 0);
7400 tree imag = TREE_OPERAND (arg, 1);
7401
7402 /* If either part is zero, cabs is fabs of the other. */
7403 if (real_zerop (real))
7404 return fold_build1_loc (loc, ABS_EXPR, type, imag);
7405 if (real_zerop (imag))
7406 return fold_build1_loc (loc, ABS_EXPR, type, real);
7407
7408 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7409 if (flag_unsafe_math_optimizations
7410 && operand_equal_p (real, imag, OEP_PURE_SAME))
7411 {
7412 const REAL_VALUE_TYPE sqrt2_trunc
7413 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7414 STRIP_NOPS (real);
7415 return fold_build2_loc (loc, MULT_EXPR, type,
7416 fold_build1_loc (loc, ABS_EXPR, type, real),
7417 build_real (type, sqrt2_trunc));
7418 }
7419 }
7420
7421 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7422 if (TREE_CODE (arg) == NEGATE_EXPR
7423 || TREE_CODE (arg) == CONJ_EXPR)
7424 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
7425
7426 /* Don't do this when optimizing for size. */
7427 if (flag_unsafe_math_optimizations
7428 && optimize && optimize_function_for_speed_p (cfun))
7429 {
7430 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7431
7432 if (sqrtfn != NULL_TREE)
7433 {
7434 tree rpart, ipart, result;
7435
7436 arg = builtin_save_expr (arg);
7437
7438 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
7439 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
7440
7441 rpart = builtin_save_expr (rpart);
7442 ipart = builtin_save_expr (ipart);
7443
7444 result = fold_build2_loc (loc, PLUS_EXPR, type,
7445 fold_build2_loc (loc, MULT_EXPR, type,
7446 rpart, rpart),
7447 fold_build2_loc (loc, MULT_EXPR, type,
7448 ipart, ipart));
7449
7450 return build_call_expr_loc (loc, sqrtfn, 1, result);
7451 }
7452 }
7453
7454 return NULL_TREE;
7455 }
7456
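/* For illustration, with double x and _Complex double z, the cabs
   folds above give:

       cabs (x + 0.0 * I)  ->  fabs (x)
       cabs (-z)           ->  cabs (z)
       cabs (conj (z))     ->  cabs (z)

   and with -funsafe-math-optimizations, when optimizing for speed,
   cabs (z) becomes the equivalent of
   sqrt (creal (z) * creal (z) + cimag (z) * cimag (z)).  */
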
7457 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7458 complex tree type of the result. If NEG is true, the imaginary
7459 zero is negative. */
7460
7461 static tree
7462 build_complex_cproj (tree type, bool neg)
7463 {
7464 REAL_VALUE_TYPE rinf, rzero = dconst0;
7465
7466 real_inf (&rinf);
7467 rzero.sign = neg;
7468 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7469 build_real (TREE_TYPE (type), rzero));
7470 }
7471
7472 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7473 return type. Return NULL_TREE if no simplification can be made. */
7474
7475 static tree
7476 fold_builtin_cproj (location_t loc, tree arg, tree type)
7477 {
7478 if (!validate_arg (arg, COMPLEX_TYPE)
7479 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7480 return NULL_TREE;
7481
7482 /* If there are no infinities, return arg. */
7483 if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type))))
7484 return non_lvalue_loc (loc, arg);
7485
7486 /* Calculate the result when the argument is a constant. */
7487 if (TREE_CODE (arg) == COMPLEX_CST)
7488 {
7489 const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
7490 const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
7491
7492 if (real_isinf (real) || real_isinf (imag))
7493 return build_complex_cproj (type, imag->sign);
7494 else
7495 return arg;
7496 }
7497 else if (TREE_CODE (arg) == COMPLEX_EXPR)
7498 {
7499 tree real = TREE_OPERAND (arg, 0);
7500 tree imag = TREE_OPERAND (arg, 1);
7501
7502 STRIP_NOPS (real);
7503 STRIP_NOPS (imag);
7504
7505 /* If the real part is inf and the imag part is known to be
7506 nonnegative, return (inf + 0i). Remember side-effects are
7507 possible in the imag part. */
7508 if (TREE_CODE (real) == REAL_CST
7509 && real_isinf (TREE_REAL_CST_PTR (real))
7510 && tree_expr_nonnegative_p (imag))
7511 return omit_one_operand_loc (loc, type,
7512 build_complex_cproj (type, false),
7513 arg);
7514
7515 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
7516 Remember side-effects are possible in the real part. */
7517 if (TREE_CODE (imag) == REAL_CST
7518 && real_isinf (TREE_REAL_CST_PTR (imag)))
7519 return
7520 omit_one_operand_loc (loc, type,
7521 build_complex_cproj (type, TREE_REAL_CST_PTR
7522 (imag)->sign), arg);
7523 }
7524
7525 return NULL_TREE;
7526 }
7527
7528 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7529 Return NULL_TREE if no simplification can be made. */
7530
7531 static tree
7532 fold_builtin_sqrt (location_t loc, tree arg, tree type)
7533 {
7535 enum built_in_function fcode;
7536 tree res;
7537
7538 if (!validate_arg (arg, REAL_TYPE))
7539 return NULL_TREE;
7540
7541 /* Calculate the result when the argument is a constant. */
7542 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7543 return res;
7544
7545 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7546 fcode = builtin_mathfn_code (arg);
7547 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7548 {
7549 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7550 arg = fold_build2_loc (loc, MULT_EXPR, type,
7551 CALL_EXPR_ARG (arg, 0),
7552 build_real (type, dconsthalf));
7553 return build_call_expr_loc (loc, expfn, 1, arg);
7554 }
7555
7556 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7557 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7558 {
7559 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7560
7561 if (powfn)
7562 {
7563 tree arg0 = CALL_EXPR_ARG (arg, 0);
7564 tree tree_root;
7565 /* The inner root was either sqrt or cbrt. */
7566 /* This was a conditional expression but it triggered a bug
7567 in Sun C 5.5. */
7568 REAL_VALUE_TYPE dconstroot;
7569 if (BUILTIN_SQRT_P (fcode))
7570 dconstroot = dconsthalf;
7571 else
7572 dconstroot = dconst_third ();
7573
7574 /* Adjust for the outer root. */
7575 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7576 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7577 tree_root = build_real (type, dconstroot);
7578 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7579 }
7580 }
7581
7582 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7583 if (flag_unsafe_math_optimizations
7584 && (fcode == BUILT_IN_POW
7585 || fcode == BUILT_IN_POWF
7586 || fcode == BUILT_IN_POWL))
7587 {
7588 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7589 tree arg0 = CALL_EXPR_ARG (arg, 0);
7590 tree arg1 = CALL_EXPR_ARG (arg, 1);
7591 tree narg1;
7592 if (!tree_expr_nonnegative_p (arg0))
7593 arg0 = build1 (ABS_EXPR, type, arg0);
7594 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7595 build_real (type, dconsthalf));
7596 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7597 }
7598
7599 return NULL_TREE;
7600 }
7601
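/* For illustration, the algebraic sqrt folds above all require
   -funsafe-math-optimizations:

       sqrt (exp (x))     ->  exp (x * 0.5)
       sqrt (sqrt (x))    ->  pow (x, 0.25)
       sqrt (pow (x, y))  ->  pow (fabs (x), y * 0.5)  */
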
7602 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7603 Return NULL_TREE if no simplification can be made. */
7604
7605 static tree
7606 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7607 {
7608 const enum built_in_function fcode = builtin_mathfn_code (arg);
7609 tree res;
7610
7611 if (!validate_arg (arg, REAL_TYPE))
7612 return NULL_TREE;
7613
7614 /* Calculate the result when the argument is a constant. */
7615 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7616 return res;
7617
7618 if (flag_unsafe_math_optimizations)
7619 {
7620 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7621 if (BUILTIN_EXPONENT_P (fcode))
7622 {
7623 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7624 const REAL_VALUE_TYPE third_trunc =
7625 real_value_truncate (TYPE_MODE (type), dconst_third ());
7626 arg = fold_build2_loc (loc, MULT_EXPR, type,
7627 CALL_EXPR_ARG (arg, 0),
7628 build_real (type, third_trunc));
7629 return build_call_expr_loc (loc, expfn, 1, arg);
7630 }
7631
7632 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7633 if (BUILTIN_SQRT_P (fcode))
7634 {
7635 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7636
7637 if (powfn)
7638 {
7639 tree arg0 = CALL_EXPR_ARG (arg, 0);
7640 tree tree_root;
7641 REAL_VALUE_TYPE dconstroot = dconst_third ();
7642
7643 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7644 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7645 tree_root = build_real (type, dconstroot);
7646 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7647 }
7648 }
7649
7650 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7651 if (BUILTIN_CBRT_P (fcode))
7652 {
7653 tree arg0 = CALL_EXPR_ARG (arg, 0);
7654 if (tree_expr_nonnegative_p (arg0))
7655 {
7656 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7657
7658 if (powfn)
7659 {
7660 tree tree_root;
7661 REAL_VALUE_TYPE dconstroot;
7662
7663 real_arithmetic (&dconstroot, MULT_EXPR,
7664 dconst_third_ptr (), dconst_third_ptr ());
7665 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7666 tree_root = build_real (type, dconstroot);
7667 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7668 }
7669 }
7670 }
7671
7672 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7673 if (fcode == BUILT_IN_POW
7674 || fcode == BUILT_IN_POWF
7675 || fcode == BUILT_IN_POWL)
7676 {
7677 tree arg00 = CALL_EXPR_ARG (arg, 0);
7678 tree arg01 = CALL_EXPR_ARG (arg, 1);
7679 if (tree_expr_nonnegative_p (arg00))
7680 {
7681 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7682 const REAL_VALUE_TYPE dconstroot
7683 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7684 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7685 build_real (type, dconstroot));
7686 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7687 }
7688 }
7689 }
7690 return NULL_TREE;
7691 }
7692
7693 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7694 TYPE is the type of the return value. Return NULL_TREE if no
7695 simplification can be made. */
7696
7697 static tree
7698 fold_builtin_cos (location_t loc,
7699 tree arg, tree type, tree fndecl)
7700 {
7701 tree res, narg;
7702
7703 if (!validate_arg (arg, REAL_TYPE))
7704 return NULL_TREE;
7705
7706 /* Calculate the result when the argument is a constant. */
7707 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7708 return res;
7709
7710   /* Optimize cos (-x) into cos (x).  */
7711 if ((narg = fold_strip_sign_ops (arg)))
7712 return build_call_expr_loc (loc, fndecl, 1, narg);
7713
7714 return NULL_TREE;
7715 }
7716
7717 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7718 Return NULL_TREE if no simplification can be made. */
7719
7720 static tree
7721 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7722 {
7723 if (validate_arg (arg, REAL_TYPE))
7724 {
7725 tree res, narg;
7726
7727 /* Calculate the result when the argument is a constant. */
7728 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7729 return res;
7730
7731       /* Optimize cosh (-x) into cosh (x).  */
7732 if ((narg = fold_strip_sign_ops (arg)))
7733 return build_call_expr_loc (loc, fndecl, 1, narg);
7734 }
7735
7736 return NULL_TREE;
7737 }
7738
7739 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7740 argument ARG. TYPE is the type of the return value. Return
7741 NULL_TREE if no simplification can be made. */
7742
7743 static tree
7744 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7745 bool hyper)
7746 {
7747 if (validate_arg (arg, COMPLEX_TYPE)
7748 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7749 {
7750 tree tmp;
7751
7752 /* Calculate the result when the argument is a constant. */
7753 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7754 return tmp;
7755
7756 /* Optimize fn(-x) into fn(x). */
7757 if ((tmp = fold_strip_sign_ops (arg)))
7758 return build_call_expr_loc (loc, fndecl, 1, tmp);
7759 }
7760
7761 return NULL_TREE;
7762 }
7763
7764 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7765 Return NULL_TREE if no simplification can be made. */
7766
7767 static tree
7768 fold_builtin_tan (tree arg, tree type)
7769 {
7770 enum built_in_function fcode;
7771 tree res;
7772
7773 if (!validate_arg (arg, REAL_TYPE))
7774 return NULL_TREE;
7775
7776 /* Calculate the result when the argument is a constant. */
7777 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7778 return res;
7779
7780 /* Optimize tan(atan(x)) = x. */
7781 fcode = builtin_mathfn_code (arg);
7782 if (flag_unsafe_math_optimizations
7783 && (fcode == BUILT_IN_ATAN
7784 || fcode == BUILT_IN_ATANF
7785 || fcode == BUILT_IN_ATANL))
7786 return CALL_EXPR_ARG (arg, 0);
7787
7788 return NULL_TREE;
7789 }
7790
7791 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7792 NULL_TREE if no simplification can be made. */
7793
7794 static tree
7795 fold_builtin_sincos (location_t loc,
7796 tree arg0, tree arg1, tree arg2)
7797 {
7798 tree type;
7799 tree res, fn, call;
7800
7801 if (!validate_arg (arg0, REAL_TYPE)
7802 || !validate_arg (arg1, POINTER_TYPE)
7803 || !validate_arg (arg2, POINTER_TYPE))
7804 return NULL_TREE;
7805
7806 type = TREE_TYPE (arg0);
7807
7808 /* Calculate the result when the argument is a constant. */
7809 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7810 return res;
7811
7812 /* Canonicalize sincos to cexpi. */
7813 if (!TARGET_C99_FUNCTIONS)
7814 return NULL_TREE;
7815 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7816 if (!fn)
7817 return NULL_TREE;
7818
7819 call = build_call_expr_loc (loc, fn, 1, arg0);
7820 call = builtin_save_expr (call);
7821
7822 return build2 (COMPOUND_EXPR, void_type_node,
7823 build2 (MODIFY_EXPR, void_type_node,
7824 build_fold_indirect_ref_loc (loc, arg1),
7825 build1 (IMAGPART_EXPR, type, call)),
7826 build2 (MODIFY_EXPR, void_type_node,
7827 build_fold_indirect_ref_loc (loc, arg2),
7828 build1 (REALPART_EXPR, type, call)));
7829 }
7830
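/* For illustration, on targets with C99 math functions the call

       sincos (x, &s, &c);

   is canonicalized to the equivalent of

       tmp = cexpi (x);  s = __imag__ tmp;  c = __real__ tmp;

   exposing a single cexpi computation to later optimizers.  */
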
7831 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7832 NULL_TREE if no simplification can be made. */
7833
7834 static tree
7835 fold_builtin_cexp (location_t loc, tree arg0, tree type)
7836 {
7837 tree rtype;
7838 tree realp, imagp, ifn;
7839 tree res;
7840
7841 if (!validate_arg (arg0, COMPLEX_TYPE)
7842 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
7843 return NULL_TREE;
7844
7845 /* Calculate the result when the argument is a constant. */
7846 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
7847 return res;
7848
7849 rtype = TREE_TYPE (TREE_TYPE (arg0));
7850
7851   /* If we can figure out the real part of arg0 and it is constant
7852      zero, fold to cexpi.  */
7853 if (!TARGET_C99_FUNCTIONS)
7854 return NULL_TREE;
7855 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7856 if (!ifn)
7857 return NULL_TREE;
7858
7859 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
7860 && real_zerop (realp))
7861 {
7862 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
7863 return build_call_expr_loc (loc, ifn, 1, narg);
7864 }
7865
7866   /* If we can easily decompose the real and imaginary parts, split cexp
7867      into exp (r) * cexpi (i).  */
7868 if (flag_unsafe_math_optimizations
7869 && realp)
7870 {
7871 tree rfn, rcall, icall;
7872
7873 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7874 if (!rfn)
7875 return NULL_TREE;
7876
7877 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
7878 if (!imagp)
7879 return NULL_TREE;
7880
7881 icall = build_call_expr_loc (loc, ifn, 1, imagp);
7882 icall = builtin_save_expr (icall);
7883 rcall = build_call_expr_loc (loc, rfn, 1, realp);
7884 rcall = builtin_save_expr (rcall);
7885 return fold_build2_loc (loc, COMPLEX_EXPR, type,
7886 fold_build2_loc (loc, MULT_EXPR, rtype,
7887 rcall,
7888 fold_build1_loc (loc, REALPART_EXPR,
7889 rtype, icall)),
7890 fold_build2_loc (loc, MULT_EXPR, rtype,
7891 rcall,
7892 fold_build1_loc (loc, IMAGPART_EXPR,
7893 rtype, icall)));
7894 }
7895
7896 return NULL_TREE;
7897 }
7898
7899 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7900 Return NULL_TREE if no simplification can be made. */
7901
7902 static tree
7903 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7904 {
7905 if (!validate_arg (arg, REAL_TYPE))
7906 return NULL_TREE;
7907
7908 /* Optimize trunc of constant value. */
7909 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7910 {
7911 REAL_VALUE_TYPE r, x;
7912 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7913
7914 x = TREE_REAL_CST (arg);
7915 real_trunc (&r, TYPE_MODE (type), &x);
7916 return build_real (type, r);
7917 }
7918
7919 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7920 }
7921
7922 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7923 Return NULL_TREE if no simplification can be made. */
7924
7925 static tree
7926 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7927 {
7928 if (!validate_arg (arg, REAL_TYPE))
7929 return NULL_TREE;
7930
7931 /* Optimize floor of constant value. */
7932 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7933 {
7934 REAL_VALUE_TYPE x;
7935
7936 x = TREE_REAL_CST (arg);
7937 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7938 {
7939 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7940 REAL_VALUE_TYPE r;
7941
7942 real_floor (&r, TYPE_MODE (type), &x);
7943 return build_real (type, r);
7944 }
7945 }
7946
7947 /* Fold floor (x) where x is nonnegative to trunc (x). */
7948 if (tree_expr_nonnegative_p (arg))
7949 {
7950 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7951 if (truncfn)
7952 return build_call_expr_loc (loc, truncfn, 1, arg);
7953 }
7954
7955 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7956 }
7957
7958 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7959 Return NULL_TREE if no simplification can be made. */
7960
7961 static tree
7962 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7963 {
7964 if (!validate_arg (arg, REAL_TYPE))
7965 return NULL_TREE;
7966
7967 /* Optimize ceil of constant value. */
7968 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7969 {
7970 REAL_VALUE_TYPE x;
7971
7972 x = TREE_REAL_CST (arg);
7973 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7974 {
7975 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7976 REAL_VALUE_TYPE r;
7977
7978 real_ceil (&r, TYPE_MODE (type), &x);
7979 return build_real (type, r);
7980 }
7981 }
7982
7983 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7984 }
7985
7986 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7987 Return NULL_TREE if no simplification can be made. */
7988
7989 static tree
7990 fold_builtin_round (location_t loc, tree fndecl, tree arg)
7991 {
7992 if (!validate_arg (arg, REAL_TYPE))
7993 return NULL_TREE;
7994
7995 /* Optimize round of constant value. */
7996 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7997 {
7998 REAL_VALUE_TYPE x;
7999
8000 x = TREE_REAL_CST (arg);
8001 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8002 {
8003 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8004 REAL_VALUE_TYPE r;
8005
8006 real_round (&r, TYPE_MODE (type), &x);
8007 return build_real (type, r);
8008 }
8009 }
8010
8011 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8012 }
8013
8014 /* Fold function call to builtin lround, lroundf or lroundl (or the
8015 corresponding long long versions) and other rounding functions. ARG
8016 is the argument to the call. Return NULL_TREE if no simplification
8017 can be made. */
8018
8019 static tree
8020 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
8021 {
8022 if (!validate_arg (arg, REAL_TYPE))
8023 return NULL_TREE;
8024
8025 /* Optimize lround of constant value. */
8026 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8027 {
8028 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
8029
8030 if (real_isfinite (&x))
8031 {
8032 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
8033 tree ftype = TREE_TYPE (arg);
8034 double_int val;
8035 REAL_VALUE_TYPE r;
8036
8037 switch (DECL_FUNCTION_CODE (fndecl))
8038 {
8039 CASE_FLT_FN (BUILT_IN_IFLOOR):
8040 CASE_FLT_FN (BUILT_IN_LFLOOR):
8041 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8042 real_floor (&r, TYPE_MODE (ftype), &x);
8043 break;
8044
8045 CASE_FLT_FN (BUILT_IN_ICEIL):
8046 CASE_FLT_FN (BUILT_IN_LCEIL):
8047 CASE_FLT_FN (BUILT_IN_LLCEIL):
8048 real_ceil (&r, TYPE_MODE (ftype), &x);
8049 break;
8050
8051 CASE_FLT_FN (BUILT_IN_IROUND):
8052 CASE_FLT_FN (BUILT_IN_LROUND):
8053 CASE_FLT_FN (BUILT_IN_LLROUND):
8054 real_round (&r, TYPE_MODE (ftype), &x);
8055 break;
8056
8057 default:
8058 gcc_unreachable ();
8059 }
8060
8061 real_to_integer2 ((HOST_WIDE_INT *)&val.low, &val.high, &r);
8062 if (double_int_fits_to_tree_p (itype, val))
8063 return double_int_to_tree (itype, val);
8064 }
8065 }
8066
8067 switch (DECL_FUNCTION_CODE (fndecl))
8068 {
8069 CASE_FLT_FN (BUILT_IN_LFLOOR):
8070 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8071 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8072 if (tree_expr_nonnegative_p (arg))
8073 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
8074 TREE_TYPE (TREE_TYPE (fndecl)), arg);
8075 break;
8076 default:;
8077 }
8078
8079 return fold_fixed_mathfn (loc, fndecl, arg);
8080 }
8081
8082 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8083    and their long and long long variants (e.g. ffsl and ffsll).  ARG is
8084 the argument to the call. Return NULL_TREE if no simplification can
8085 be made. */
8086
8087 static tree
8088 fold_builtin_bitop (tree fndecl, tree arg)
8089 {
8090 if (!validate_arg (arg, INTEGER_TYPE))
8091 return NULL_TREE;
8092
8093 /* Optimize for constant argument. */
8094 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8095 {
8096 HOST_WIDE_INT hi, width, result;
8097 unsigned HOST_WIDE_INT lo;
8098 tree type;
8099
8100 type = TREE_TYPE (arg);
8101 width = TYPE_PRECISION (type);
8102 lo = TREE_INT_CST_LOW (arg);
8103
8104 /* Clear all the bits that are beyond the type's precision. */
8105 if (width > HOST_BITS_PER_WIDE_INT)
8106 {
8107 hi = TREE_INT_CST_HIGH (arg);
8108 if (width < HOST_BITS_PER_DOUBLE_INT)
8109 hi &= ~((unsigned HOST_WIDE_INT) (-1)
8110 << (width - HOST_BITS_PER_WIDE_INT));
8111 }
8112 else
8113 {
8114 hi = 0;
8115 if (width < HOST_BITS_PER_WIDE_INT)
8116 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
8117 }
8118
8119 switch (DECL_FUNCTION_CODE (fndecl))
8120 {
8121 CASE_INT_FN (BUILT_IN_FFS):
8122 if (lo != 0)
8123 result = ffs_hwi (lo);
8124 else if (hi != 0)
8125 result = HOST_BITS_PER_WIDE_INT + ffs_hwi (hi);
8126 else
8127 result = 0;
8128 break;
8129
8130 CASE_INT_FN (BUILT_IN_CLZ):
8131 if (hi != 0)
8132 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
8133 else if (lo != 0)
8134 result = width - floor_log2 (lo) - 1;
8135 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8136 result = width;
8137 break;
8138
8139 CASE_INT_FN (BUILT_IN_CTZ):
8140 if (lo != 0)
8141 result = ctz_hwi (lo);
8142 else if (hi != 0)
8143 result = HOST_BITS_PER_WIDE_INT + ctz_hwi (hi);
8144 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8145 result = width;
8146 break;
8147
8148 CASE_INT_FN (BUILT_IN_CLRSB):
8149 if (width > 2 * HOST_BITS_PER_WIDE_INT)
8150 return NULL_TREE;
8151 if (width > HOST_BITS_PER_WIDE_INT
8152 && (hi & ((unsigned HOST_WIDE_INT) 1
8153 << (width - HOST_BITS_PER_WIDE_INT - 1))) != 0)
8154 {
8155 hi = ~hi & ~((unsigned HOST_WIDE_INT) (-1)
8156 << (width - HOST_BITS_PER_WIDE_INT - 1));
8157 lo = ~lo;
8158 }
8159 else if (width <= HOST_BITS_PER_WIDE_INT
8160 && (lo & ((unsigned HOST_WIDE_INT) 1 << (width - 1))) != 0)
8161 lo = ~lo & ~((unsigned HOST_WIDE_INT) (-1) << (width - 1));
8162 if (hi != 0)
8163 result = width - floor_log2 (hi) - 2 - HOST_BITS_PER_WIDE_INT;
8164 else if (lo != 0)
8165 result = width - floor_log2 (lo) - 2;
8166 else
8167 result = width - 1;
8168 break;
8169
8170 CASE_INT_FN (BUILT_IN_POPCOUNT):
8171 result = 0;
8172 while (lo)
8173 result++, lo &= lo - 1;
8174 while (hi)
8175 result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
8176 break;
8177
8178 CASE_INT_FN (BUILT_IN_PARITY):
8179 result = 0;
8180 while (lo)
8181 result++, lo &= lo - 1;
8182 while (hi)
8183 result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
8184 result &= 1;
8185 break;
8186
8187 default:
8188 gcc_unreachable ();
8189 }
8190
8191 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8192 }
8193
8194 return NULL_TREE;
8195 }
8196
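/* For illustration, with a 32-bit int the constant folding above
   gives:

       __builtin_ffs (8)          ->  4
       __builtin_clz (1)          ->  31
       __builtin_ctz (8)          ->  3
       __builtin_popcount (0xff)  ->  8
       __builtin_parity (7)       ->  1  */
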
8197 /* Fold function call to builtin_bswap and the short, long and long long
8198 variants. Return NULL_TREE if no simplification can be made. */
8199 static tree
8200 fold_builtin_bswap (tree fndecl, tree arg)
8201 {
8202 if (! validate_arg (arg, INTEGER_TYPE))
8203 return NULL_TREE;
8204
8205 /* Optimize constant value. */
8206 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8207 {
8208 HOST_WIDE_INT hi, width, r_hi = 0;
8209 unsigned HOST_WIDE_INT lo, r_lo = 0;
8210 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8211
8212 width = TYPE_PRECISION (type);
8213 lo = TREE_INT_CST_LOW (arg);
8214 hi = TREE_INT_CST_HIGH (arg);
8215
8216 switch (DECL_FUNCTION_CODE (fndecl))
8217 {
8218 case BUILT_IN_BSWAP16:
8219 case BUILT_IN_BSWAP32:
8220 case BUILT_IN_BSWAP64:
8221 {
8222 int s;
8223
8224 for (s = 0; s < width; s += 8)
8225 {
8226 int d = width - s - 8;
8227 unsigned HOST_WIDE_INT byte;
8228
8229 if (s < HOST_BITS_PER_WIDE_INT)
8230 byte = (lo >> s) & 0xff;
8231 else
8232 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
8233
8234 if (d < HOST_BITS_PER_WIDE_INT)
8235 r_lo |= byte << d;
8236 else
8237 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
8238 }
8239 }
8240
8241 break;
8242
8243 default:
8244 gcc_unreachable ();
8245 }
8246
8247 if (width < HOST_BITS_PER_WIDE_INT)
8248 return build_int_cst (type, r_lo);
8249 else
8250 return build_int_cst_wide (type, r_lo, r_hi);
8251 }
8252
8253 return NULL_TREE;
8254 }
8255
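/* For illustration, the constant folding above gives:

       __builtin_bswap16 (0x1234)      ->  0x3412
       __builtin_bswap32 (0x12345678)  ->  0x78563412  */
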
8256 /* A subroutine of fold_builtin to fold the various logarithmic
8257    functions.  Return NULL_TREE if no simplification can be made.
8258 FUNC is the corresponding MPFR logarithm function. */
8259
8260 static tree
8261 fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
8262 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8263 {
8264 if (validate_arg (arg, REAL_TYPE))
8265 {
8266 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8267 tree res;
8268 const enum built_in_function fcode = builtin_mathfn_code (arg);
8269
8270 /* Calculate the result when the argument is a constant. */
8271 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
8272 return res;
8273
8274 /* Special case, optimize logN(expN(x)) = x. */
8275 if (flag_unsafe_math_optimizations
8276 && ((func == mpfr_log
8277 && (fcode == BUILT_IN_EXP
8278 || fcode == BUILT_IN_EXPF
8279 || fcode == BUILT_IN_EXPL))
8280 || (func == mpfr_log2
8281 && (fcode == BUILT_IN_EXP2
8282 || fcode == BUILT_IN_EXP2F
8283 || fcode == BUILT_IN_EXP2L))
8284 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
8285 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8286
8287 /* Optimize logN(func()) for various exponential functions. We
8288 want to determine the value "x" and the power "exponent" in
8289 order to transform logN(x**exponent) into exponent*logN(x). */
8290 if (flag_unsafe_math_optimizations)
8291 {
8292 tree exponent = 0, x = 0;
8293
8294 switch (fcode)
8295 {
8296 CASE_FLT_FN (BUILT_IN_EXP):
8297 	    /* Prepare to do logN(exp(exponent)) -> exponent*logN(e).  */
8298 x = build_real (type, real_value_truncate (TYPE_MODE (type),
8299 dconst_e ()));
8300 exponent = CALL_EXPR_ARG (arg, 0);
8301 break;
8302 CASE_FLT_FN (BUILT_IN_EXP2):
8303 	    /* Prepare to do logN(exp2(exponent)) -> exponent*logN(2).  */
8304 x = build_real (type, dconst2);
8305 exponent = CALL_EXPR_ARG (arg, 0);
8306 break;
8307 CASE_FLT_FN (BUILT_IN_EXP10):
8308 CASE_FLT_FN (BUILT_IN_POW10):
8309 	    /* Prepare to do logN(exp10(exponent)) -> exponent*logN(10).  */
8310 {
8311 REAL_VALUE_TYPE dconst10;
8312 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
8313 x = build_real (type, dconst10);
8314 }
8315 exponent = CALL_EXPR_ARG (arg, 0);
8316 break;
8317 CASE_FLT_FN (BUILT_IN_SQRT):
8318 	    /* Prepare to do logN(sqrt(x)) -> 0.5*logN(x).  */
8319 x = CALL_EXPR_ARG (arg, 0);
8320 exponent = build_real (type, dconsthalf);
8321 break;
8322 CASE_FLT_FN (BUILT_IN_CBRT):
8323 	    /* Prepare to do logN(cbrt(x)) -> (1/3)*logN(x).  */
8324 x = CALL_EXPR_ARG (arg, 0);
8325 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
8326 dconst_third ()));
8327 break;
8328 CASE_FLT_FN (BUILT_IN_POW):
8329 	    /* Prepare to do logN(pow(x,exponent)) -> exponent*logN(x).  */
8330 x = CALL_EXPR_ARG (arg, 0);
8331 exponent = CALL_EXPR_ARG (arg, 1);
8332 break;
8333 default:
8334 break;
8335 }
8336
8337 /* Now perform the optimization. */
8338 if (x && exponent)
8339 {
8340 tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
8341 return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
8342 }
8343 }
8344 }
8345
8346 return NULL_TREE;
8347 }
8348
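/* For illustration, the algebraic log folds above all require
   -funsafe-math-optimizations:

       log (exp (x))     ->  x
       log2 (exp2 (x))   ->  x
       log (sqrt (x))    ->  0.5 * log (x)
       log (pow (x, y))  ->  y * log (x)  */
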
8349 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8350 NULL_TREE if no simplification can be made. */
8351
8352 static tree
8353 fold_builtin_hypot (location_t loc, tree fndecl,
8354 tree arg0, tree arg1, tree type)
8355 {
8356 tree res, narg0, narg1;
8357
8358 if (!validate_arg (arg0, REAL_TYPE)
8359 || !validate_arg (arg1, REAL_TYPE))
8360 return NULL_TREE;
8361
8362 /* Calculate the result when the argument is a constant. */
8363 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8364 return res;
8365
8366 /* If either argument to hypot has a negate or abs, strip that off.
8367 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8368 narg0 = fold_strip_sign_ops (arg0);
8369 narg1 = fold_strip_sign_ops (arg1);
8370 if (narg0 || narg1)
8371 {
8372 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
8373 narg1 ? narg1 : arg1);
8374 }
8375
8376 /* If either argument is zero, hypot is fabs of the other. */
8377 if (real_zerop (arg0))
8378 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
8379 else if (real_zerop (arg1))
8380 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
8381
8382 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8383 if (flag_unsafe_math_optimizations
8384 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8385 {
8386 const REAL_VALUE_TYPE sqrt2_trunc
8387 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8388 return fold_build2_loc (loc, MULT_EXPR, type,
8389 fold_build1_loc (loc, ABS_EXPR, type, arg0),
8390 build_real (type, sqrt2_trunc));
8391 }
8392
8393 return NULL_TREE;
8394 }
8395
8396
8397 /* Fold a builtin function call to pow, powf, or powl. Return
8398 NULL_TREE if no simplification can be made. */
8399 static tree
8400 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
8401 {
8402 tree res;
8403
8404 if (!validate_arg (arg0, REAL_TYPE)
8405 || !validate_arg (arg1, REAL_TYPE))
8406 return NULL_TREE;
8407
8408 /* Calculate the result when the argument is a constant. */
8409 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8410 return res;
8411
8412 /* Optimize pow(1.0,y) = 1.0. */
8413 if (real_onep (arg0))
8414 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8415
8416 if (TREE_CODE (arg1) == REAL_CST
8417 && !TREE_OVERFLOW (arg1))
8418 {
8419 REAL_VALUE_TYPE cint;
8420 REAL_VALUE_TYPE c;
8421 HOST_WIDE_INT n;
8422
8423 c = TREE_REAL_CST (arg1);
8424
8425 /* Optimize pow(x,0.0) = 1.0. */
8426 if (REAL_VALUES_EQUAL (c, dconst0))
8427 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8428 arg0);
8429
8430 /* Optimize pow(x,1.0) = x. */
8431 if (REAL_VALUES_EQUAL (c, dconst1))
8432 return arg0;
8433
8434 /* Optimize pow(x,-1.0) = 1.0/x. */
8435 if (REAL_VALUES_EQUAL (c, dconstm1))
8436 return fold_build2_loc (loc, RDIV_EXPR, type,
8437 build_real (type, dconst1), arg0);
8438
8439 /* Optimize pow(x,0.5) = sqrt(x). */
8440 if (flag_unsafe_math_optimizations
8441 && REAL_VALUES_EQUAL (c, dconsthalf))
8442 {
8443 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8444
8445 if (sqrtfn != NULL_TREE)
8446 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
8447 }
8448
8449 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8450 if (flag_unsafe_math_optimizations)
8451 {
8452 const REAL_VALUE_TYPE dconstroot
8453 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8454
8455 if (REAL_VALUES_EQUAL (c, dconstroot))
8456 {
8457 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8458 if (cbrtfn != NULL_TREE)
8459 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
8460 }
8461 }
8462
8463 /* Check for an integer exponent. */
8464 n = real_to_integer (&c);
8465 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
8466 if (real_identical (&c, &cint))
8467 {
8468 /* Attempt to evaluate pow at compile-time, unless this should
8469 raise an exception. */
8470 if (TREE_CODE (arg0) == REAL_CST
8471 && !TREE_OVERFLOW (arg0)
8472 && (n > 0
8473 || (!flag_trapping_math && !flag_errno_math)
8474 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
8475 {
8476 REAL_VALUE_TYPE x;
8477 bool inexact;
8478
8479 x = TREE_REAL_CST (arg0);
8480 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8481 if (flag_unsafe_math_optimizations || !inexact)
8482 return build_real (type, x);
8483 }
8484
8485 /* Strip sign ops from even integer powers. */
8486 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8487 {
8488 tree narg0 = fold_strip_sign_ops (arg0);
8489 if (narg0)
8490 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
8491 }
8492 }
8493 }
8494
8495 if (flag_unsafe_math_optimizations)
8496 {
8497 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8498
8499 /* Optimize pow(expN(x),y) = expN(x*y). */
8500 if (BUILTIN_EXPONENT_P (fcode))
8501 {
8502 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8503 tree arg = CALL_EXPR_ARG (arg0, 0);
8504 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
8505 return build_call_expr_loc (loc, expfn, 1, arg);
8506 }
8507
8508 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8509 if (BUILTIN_SQRT_P (fcode))
8510 {
8511 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8512 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8513 build_real (type, dconsthalf));
8514 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
8515 }
8516
8517 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8518 if (BUILTIN_CBRT_P (fcode))
8519 {
8520 tree arg = CALL_EXPR_ARG (arg0, 0);
8521 if (tree_expr_nonnegative_p (arg))
8522 {
8523 const REAL_VALUE_TYPE dconstroot
8524 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8525 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8526 build_real (type, dconstroot));
8527 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
8528 }
8529 }
8530
8531 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8532 if (fcode == BUILT_IN_POW
8533 || fcode == BUILT_IN_POWF
8534 || fcode == BUILT_IN_POWL)
8535 {
8536 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8537 if (tree_expr_nonnegative_p (arg00))
8538 {
8539 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8540 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
8541 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
8542 }
8543 }
8544 }
8545
8546 return NULL_TREE;
8547 }
8548
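/* For illustration (entries marked * require
   -funsafe-math-optimizations):

       pow (x, 1.0)         ->  x
       pow (x, -1.0)        ->  1.0 / x
       pow (x, 0.5)         ->  sqrt (x)          *
       pow (sqrt (x), y)    ->  pow (x, y * 0.5)  *
       pow (pow (x, y), z)  ->  pow (x, y * z)    * (x nonnegative)  */
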
8549 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8550 Return NULL_TREE if no simplification can be made. */
8551 static tree
8552 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8553 tree arg0, tree arg1, tree type)
8554 {
8555 if (!validate_arg (arg0, REAL_TYPE)
8556 || !validate_arg (arg1, INTEGER_TYPE))
8557 return NULL_TREE;
8558
8559 /* Optimize pow(1.0,y) = 1.0. */
8560 if (real_onep (arg0))
8561 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8562
8563 if (host_integerp (arg1, 0))
8564 {
8565 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8566
8567 /* Evaluate powi at compile-time. */
8568 if (TREE_CODE (arg0) == REAL_CST
8569 && !TREE_OVERFLOW (arg0))
8570 {
8571 REAL_VALUE_TYPE x;
8572 x = TREE_REAL_CST (arg0);
8573 real_powi (&x, TYPE_MODE (type), &x, c);
8574 return build_real (type, x);
8575 }
8576
8577 /* Optimize pow(x,0) = 1.0. */
8578 if (c == 0)
8579 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8580 arg0);
8581
8582 /* Optimize pow(x,1) = x. */
8583 if (c == 1)
8584 return arg0;
8585
8586 /* Optimize pow(x,-1) = 1.0/x. */
8587 if (c == -1)
8588 return fold_build2_loc (loc, RDIV_EXPR, type,
8589 build_real (type, dconst1), arg0);
8590 }
8591
8592 return NULL_TREE;
8593 }
8594
8595 /* A subroutine of fold_builtin to fold the various exponent
8596 functions. Return NULL_TREE if no simplification can be made.
8597 FUNC is the corresponding MPFR exponent function. */
8598
8599 static tree
8600 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8601 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8602 {
8603 if (validate_arg (arg, REAL_TYPE))
8604 {
8605 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8606 tree res;
8607
8608 /* Calculate the result when the argument is a constant. */
8609 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8610 return res;
8611
8612 /* Optimize expN(logN(x)) = x. */
8613 if (flag_unsafe_math_optimizations)
8614 {
8615 const enum built_in_function fcode = builtin_mathfn_code (arg);
8616
8617 if ((func == mpfr_exp
8618 && (fcode == BUILT_IN_LOG
8619 || fcode == BUILT_IN_LOGF
8620 || fcode == BUILT_IN_LOGL))
8621 || (func == mpfr_exp2
8622 && (fcode == BUILT_IN_LOG2
8623 || fcode == BUILT_IN_LOG2F
8624 || fcode == BUILT_IN_LOG2L))
8625 || (func == mpfr_exp10
8626 && (fcode == BUILT_IN_LOG10
8627 || fcode == BUILT_IN_LOG10F
8628 || fcode == BUILT_IN_LOG10L)))
8629 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8630 }
8631 }
8632
8633 return NULL_TREE;
8634 }
8635
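/* Editorial aside: an illustrative sketch, not part of GCC, kept under
   #if 0. It shows the expN(logN(x)) = x fold above at the source level;
   the fold only fires with -funsafe-math-optimizations, since exp(log(x))
   differs from x for x <= 0 and by rounding elsewhere. */
#if 0
#include <math.h>

double
collapses_to_x (double x)
{
  /* With -funsafe-math-optimizations this folds to plain (x);
     without it, both calls remain. */
  return exp (log (x));
}
#endif
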
8636 /* Return true if VAR is a VAR_DECL or a component thereof. */
8637
8638 static bool
8639 var_decl_component_p (tree var)
8640 {
8641 tree inner = var;
8642 while (handled_component_p (inner))
8643 inner = TREE_OPERAND (inner, 0);
8644 return SSA_VAR_P (inner);
8645 }
8646
8647 /* Fold function call to builtin memset. Return
8648 NULL_TREE if no simplification can be made. */
8649
8650 static tree
8651 fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
8652 tree type, bool ignore)
8653 {
8654 tree var, ret, etype;
8655 unsigned HOST_WIDE_INT length, cval;
8656
8657 if (! validate_arg (dest, POINTER_TYPE)
8658 || ! validate_arg (c, INTEGER_TYPE)
8659 || ! validate_arg (len, INTEGER_TYPE))
8660 return NULL_TREE;
8661
8662 if (! host_integerp (len, 1))
8663 return NULL_TREE;
8664
8665 /* If the LEN parameter is zero, return DEST. */
8666 if (integer_zerop (len))
8667 return omit_one_operand_loc (loc, type, dest, c);
8668
8669 if (TREE_CODE (c) != INTEGER_CST || TREE_SIDE_EFFECTS (dest))
8670 return NULL_TREE;
8671
8672 var = dest;
8673 STRIP_NOPS (var);
8674 if (TREE_CODE (var) != ADDR_EXPR)
8675 return NULL_TREE;
8676
8677 var = TREE_OPERAND (var, 0);
8678 if (TREE_THIS_VOLATILE (var))
8679 return NULL_TREE;
8680
8681 etype = TREE_TYPE (var);
8682 if (TREE_CODE (etype) == ARRAY_TYPE)
8683 etype = TREE_TYPE (etype);
8684
8685 if (!INTEGRAL_TYPE_P (etype)
8686 && !POINTER_TYPE_P (etype))
8687 return NULL_TREE;
8688
8689 if (! var_decl_component_p (var))
8690 return NULL_TREE;
8691
8692 length = tree_low_cst (len, 1);
8693 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
8694 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
8695 return NULL_TREE;
8696
8697 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8698 return NULL_TREE;
8699
8700 if (integer_zerop (c))
8701 cval = 0;
8702 else
8703 {
8704 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8705 return NULL_TREE;
8706
8707 cval = TREE_INT_CST_LOW (c);
8708 cval &= 0xff;
8709 cval |= cval << 8;
8710 cval |= cval << 16;
8711 cval |= (cval << 31) << 1;
8712 }
8713
8714 ret = build_int_cst_type (etype, cval);
8715 var = build_fold_indirect_ref_loc (loc,
8716 fold_convert_loc (loc,
8717 build_pointer_type (etype),
8718 dest));
8719 ret = build2 (MODIFY_EXPR, etype, var, ret);
8720 if (ignore)
8721 return ret;
8722
8723 return omit_one_operand_loc (loc, type, dest, ret);
8724 }
8725
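/* Editorial aside: a minimal sketch, not part of GCC, kept under #if 0.
   It illustrates the byte replication above: when memset exactly covers
   an integral variable, the fill byte is smeared across the word and the
   call folds to a single store. The sizes assume a typical target with
   8-bit bytes and 32-bit int. */
#if 0
#include <string.h>

unsigned int
folded_memset (void)
{
  unsigned int u;
  /* cval = 0xAB; cval |= cval << 8; cval |= cval << 16; so this folds
     to the plain store u = 0xABABABAB. */
  memset (&u, 0xAB, sizeof u);
  return u;
}
#endif
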
8726 /* Fold function call to builtin bzero. Return
8727 NULL_TREE if no simplification can be made. */
8728
8729 static tree
8730 fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
8731 {
8732 if (! validate_arg (dest, POINTER_TYPE)
8733 || ! validate_arg (size, INTEGER_TYPE))
8734 return NULL_TREE;
8735
8736 if (!ignore)
8737 return NULL_TREE;
8738
8739 /* New argument list transforming bzero(ptr x, int y) to
8740 memset(ptr x, int 0, size_t y). This is done this way
8741 so that if it isn't expanded inline, we fall back to
8742 calling bzero instead of memset. */
8743
8744 return fold_builtin_memset (loc, dest, integer_zero_node,
8745 fold_convert_loc (loc, size_type_node, size),
8746 void_type_node, ignore);
8747 }
8748
8749 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8750 NULL_TREE if no simplification can be made.
8751 If ENDP is 0, return DEST (like memcpy).
8752 If ENDP is 1, return DEST+LEN (like mempcpy).
8753 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8754 If ENDP is 3, return DEST; additionally *SRC and *DEST may overlap
8755 (memmove). */
8756
8757 static tree
8758 fold_builtin_memory_op (location_t loc, tree dest, tree src,
8759 tree len, tree type, bool ignore, int endp)
8760 {
8761 tree destvar, srcvar, expr;
8762
8763 if (! validate_arg (dest, POINTER_TYPE)
8764 || ! validate_arg (src, POINTER_TYPE)
8765 || ! validate_arg (len, INTEGER_TYPE))
8766 return NULL_TREE;
8767
8768 /* If the LEN parameter is zero, return DEST. */
8769 if (integer_zerop (len))
8770 return omit_one_operand_loc (loc, type, dest, src);
8771
8772 /* If SRC and DEST are the same (and not volatile), return
8773 DEST{,+LEN,+LEN-1}. */
8774 if (operand_equal_p (src, dest, 0))
8775 expr = len;
8776 else
8777 {
8778 tree srctype, desttype;
8779 unsigned int src_align, dest_align;
8780 tree off0;
8781
8782 if (endp == 3)
8783 {
8784 src_align = get_pointer_alignment (src);
8785 dest_align = get_pointer_alignment (dest);
8786
8787 /* Both DEST and SRC must be pointer types.
8788 ??? This is what old code did. Is the testing for pointer types
8789 really mandatory?
8790
8791 If either SRC is readonly or length is 1, we can use memcpy. */
8792 if (!dest_align || !src_align)
8793 return NULL_TREE;
8794 if (readonly_data_expr (src)
8795 || (host_integerp (len, 1)
8796 && (MIN (src_align, dest_align) / BITS_PER_UNIT
8797 >= (unsigned HOST_WIDE_INT) tree_low_cst (len, 1))))
8798 {
8799 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8800 if (!fn)
8801 return NULL_TREE;
8802 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8803 }
8804
8805 /* If *src and *dest can't overlap, optimize into memcpy as well. */
8806 if (TREE_CODE (src) == ADDR_EXPR
8807 && TREE_CODE (dest) == ADDR_EXPR)
8808 {
8809 tree src_base, dest_base, fn;
8810 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
8811 HOST_WIDE_INT size = -1;
8812 HOST_WIDE_INT maxsize = -1;
8813
8814 srcvar = TREE_OPERAND (src, 0);
8815 src_base = get_ref_base_and_extent (srcvar, &src_offset,
8816 &size, &maxsize);
8817 destvar = TREE_OPERAND (dest, 0);
8818 dest_base = get_ref_base_and_extent (destvar, &dest_offset,
8819 &size, &maxsize);
8820 if (host_integerp (len, 1))
8821 maxsize = tree_low_cst (len, 1);
8822 else
8823 maxsize = -1;
8824 src_offset /= BITS_PER_UNIT;
8825 dest_offset /= BITS_PER_UNIT;
8826 if (SSA_VAR_P (src_base)
8827 && SSA_VAR_P (dest_base))
8828 {
8829 if (operand_equal_p (src_base, dest_base, 0)
8830 && ranges_overlap_p (src_offset, maxsize,
8831 dest_offset, maxsize))
8832 return NULL_TREE;
8833 }
8834 else if (TREE_CODE (src_base) == MEM_REF
8835 && TREE_CODE (dest_base) == MEM_REF)
8836 {
8837 double_int off;
8838 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
8839 TREE_OPERAND (dest_base, 0), 0))
8840 return NULL_TREE;
8841 off = mem_ref_offset (src_base)
8842 + double_int::from_shwi (src_offset);
8843 if (!off.fits_shwi ())
8844 return NULL_TREE;
8845 src_offset = off.low;
8846 off = mem_ref_offset (dest_base)
8847 + double_int::from_shwi (dest_offset);
8848 if (!off.fits_shwi ())
8849 return NULL_TREE;
8850 dest_offset = off.low;
8851 if (ranges_overlap_p (src_offset, maxsize,
8852 dest_offset, maxsize))
8853 return NULL_TREE;
8854 }
8855 else
8856 return NULL_TREE;
8857
8858 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8859 if (!fn)
8860 return NULL_TREE;
8861 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8862 }
8863
8864 /* If the destination and source do not alias optimize into
8865 memcpy as well. */
8866 if ((is_gimple_min_invariant (dest)
8867 || TREE_CODE (dest) == SSA_NAME)
8868 && (is_gimple_min_invariant (src)
8869 || TREE_CODE (src) == SSA_NAME))
8870 {
8871 ao_ref destr, srcr;
8872 ao_ref_init_from_ptr_and_size (&destr, dest, len);
8873 ao_ref_init_from_ptr_and_size (&srcr, src, len);
8874 if (!refs_may_alias_p_1 (&destr, &srcr, false))
8875 {
8876 tree fn;
8877 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8878 if (!fn)
8879 return NULL_TREE;
8880 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8881 }
8882 }
8883
8884 return NULL_TREE;
8885 }
8886
8887 if (!host_integerp (len, 0))
8888 return NULL_TREE;
8889 /* FIXME:
8890 This logic loses for arguments like (type *)malloc (sizeof (type)),
8891 since we strip the casts off the VOID return value from malloc.
8892 Perhaps we ought to inherit the type from a non-VOID argument here? */
8893 STRIP_NOPS (src);
8894 STRIP_NOPS (dest);
8895 if (!POINTER_TYPE_P (TREE_TYPE (src))
8896 || !POINTER_TYPE_P (TREE_TYPE (dest)))
8897 return NULL_TREE;
8898 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
8899 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
8900 {
8901 tree tem = TREE_OPERAND (src, 0);
8902 STRIP_NOPS (tem);
8903 if (tem != TREE_OPERAND (src, 0))
8904 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
8905 }
8906 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
8907 {
8908 tree tem = TREE_OPERAND (dest, 0);
8909 STRIP_NOPS (tem);
8910 if (tem != TREE_OPERAND (dest, 0))
8911 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
8912 }
8913 srctype = TREE_TYPE (TREE_TYPE (src));
8914 if (TREE_CODE (srctype) == ARRAY_TYPE
8915 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8916 {
8917 srctype = TREE_TYPE (srctype);
8918 STRIP_NOPS (src);
8919 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
8920 }
8921 desttype = TREE_TYPE (TREE_TYPE (dest));
8922 if (TREE_CODE (desttype) == ARRAY_TYPE
8923 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8924 {
8925 desttype = TREE_TYPE (desttype);
8926 STRIP_NOPS (dest);
8927 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
8928 }
8929 if (TREE_ADDRESSABLE (srctype)
8930 || TREE_ADDRESSABLE (desttype))
8931 return NULL_TREE;
8932
8933 src_align = get_pointer_alignment (src);
8934 dest_align = get_pointer_alignment (dest);
8935 if (dest_align < TYPE_ALIGN (desttype)
8936 || src_align < TYPE_ALIGN (srctype))
8937 return NULL_TREE;
8938
8939 if (!ignore)
8940 dest = builtin_save_expr (dest);
8941
8942 /* Build accesses at offset zero with a ref-all character type. */
8943 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
8944 ptr_mode, true), 0);
8945
8946 destvar = dest;
8947 STRIP_NOPS (destvar);
8948 if (TREE_CODE (destvar) == ADDR_EXPR
8949 && var_decl_component_p (TREE_OPERAND (destvar, 0))
8950 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8951 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
8952 else
8953 destvar = NULL_TREE;
8954
8955 srcvar = src;
8956 STRIP_NOPS (srcvar);
8957 if (TREE_CODE (srcvar) == ADDR_EXPR
8958 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
8959 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8960 {
8961 if (!destvar
8962 || src_align >= TYPE_ALIGN (desttype))
8963 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
8964 srcvar, off0);
8965 else if (!STRICT_ALIGNMENT)
8966 {
8967 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
8968 src_align);
8969 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
8970 }
8971 else
8972 srcvar = NULL_TREE;
8973 }
8974 else
8975 srcvar = NULL_TREE;
8976
8977 if (srcvar == NULL_TREE && destvar == NULL_TREE)
8978 return NULL_TREE;
8979
8980 if (srcvar == NULL_TREE)
8981 {
8982 STRIP_NOPS (src);
8983 if (src_align >= TYPE_ALIGN (desttype))
8984 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
8985 else
8986 {
8987 if (STRICT_ALIGNMENT)
8988 return NULL_TREE;
8989 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
8990 src_align);
8991 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
8992 }
8993 }
8994 else if (destvar == NULL_TREE)
8995 {
8996 STRIP_NOPS (dest);
8997 if (dest_align >= TYPE_ALIGN (srctype))
8998 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
8999 else
9000 {
9001 if (STRICT_ALIGNMENT)
9002 return NULL_TREE;
9003 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
9004 dest_align);
9005 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
9006 }
9007 }
9008
9009 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, srcvar);
9010 }
9011
9012 if (ignore)
9013 return expr;
9014
9015 if (endp == 0 || endp == 3)
9016 return omit_one_operand_loc (loc, type, dest, expr);
9017
9018 if (expr == len)
9019 expr = NULL_TREE;
9020
9021 if (endp == 2)
9022 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
9023 ssize_int (1));
9024
9025 dest = fold_build_pointer_plus_loc (loc, dest, len);
9026 dest = fold_convert_loc (loc, type, dest);
9027 if (expr)
9028 dest = omit_one_operand_loc (loc, type, dest, expr);
9029 return dest;
9030 }
9031
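/* Editorial aside: an illustrative sketch, not part of GCC, kept under
   #if 0. It shows the aggregate fold above: both operands are ADDR_EXPRs
   of local variables, the length equals TYPE_SIZE_UNIT of both types and
   the alignment suffices, so the memcpy collapses into one MODIFY_EXPR,
   i.e. a plain assignment. */
#if 0
#include <string.h>

struct point { int x, y; };

struct point
folded_memcpy (struct point s)
{
  struct point d;
  /* Folds to the equivalent of: d = s; */
  memcpy (&d, &s, sizeof d);
  return d;
}
#endif
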
9032 /* Fold function call to builtin strcpy with arguments DEST and SRC.
9033 If LEN is not NULL, it represents the length of the string to be
9034 copied. Return NULL_TREE if no simplification can be made. */
9035
9036 tree
9037 fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
9038 {
9039 tree fn;
9040
9041 if (!validate_arg (dest, POINTER_TYPE)
9042 || !validate_arg (src, POINTER_TYPE))
9043 return NULL_TREE;
9044
9045 /* If SRC and DEST are the same (and not volatile), return DEST. */
9046 if (operand_equal_p (src, dest, 0))
9047 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
9048
9049 if (optimize_function_for_size_p (cfun))
9050 return NULL_TREE;
9051
9052 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
9053 if (!fn)
9054 return NULL_TREE;
9055
9056 if (!len)
9057 {
9058 len = c_strlen (src, 1);
9059 if (! len || TREE_SIDE_EFFECTS (len))
9060 return NULL_TREE;
9061 }
9062
9063 len = fold_convert_loc (loc, size_type_node, len);
9064 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
9065 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
9066 build_call_expr_loc (loc, fn, 3, dest, src, len));
9067 }
9068
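/* Editorial aside: an illustrative sketch, not part of GCC, kept under
   #if 0. It shows the strcpy fold above: with a constant source string,
   c_strlen gives the length and the call becomes a fixed-size memcpy
   that also copies the terminating NUL (the fold is skipped when
   optimizing for size). */
#if 0
#include <string.h>

void
folded_strcpy (char *d)
{
  /* c_strlen ("abc") == 3, so this folds to: memcpy (d, "abc", 4). */
  strcpy (d, "abc");
}
#endif
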
9069 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
9070 Return NULL_TREE if no simplification can be made. */
9071
9072 static tree
9073 fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
9074 {
9075 tree fn, len, lenp1, call, type;
9076
9077 if (!validate_arg (dest, POINTER_TYPE)
9078 || !validate_arg (src, POINTER_TYPE))
9079 return NULL_TREE;
9080
9081 len = c_strlen (src, 1);
9082 if (!len
9083 || TREE_CODE (len) != INTEGER_CST)
9084 return NULL_TREE;
9085
9086 if (optimize_function_for_size_p (cfun)
9087 /* If length is zero it's small enough. */
9088 && !integer_zerop (len))
9089 return NULL_TREE;
9090
9091 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
9092 if (!fn)
9093 return NULL_TREE;
9094
9095 lenp1 = size_binop_loc (loc, PLUS_EXPR,
9096 fold_convert_loc (loc, size_type_node, len),
9097 build_int_cst (size_type_node, 1));
9098 /* We use dest twice in building our expression. Save it from
9099 multiple expansions. */
9100 dest = builtin_save_expr (dest);
9101 call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);
9102
9103 type = TREE_TYPE (TREE_TYPE (fndecl));
9104 dest = fold_build_pointer_plus_loc (loc, dest, len);
9105 dest = fold_convert_loc (loc, type, dest);
9106 dest = omit_one_operand_loc (loc, type, dest, call);
9107 return dest;
9108 }
9109
9110 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
9111 If SLEN is not NULL, it represents the length of the source string.
9112 Return NULL_TREE if no simplification can be made. */
9113
9114 tree
9115 fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
9116 tree src, tree len, tree slen)
9117 {
9118 tree fn;
9119
9120 if (!validate_arg (dest, POINTER_TYPE)
9121 || !validate_arg (src, POINTER_TYPE)
9122 || !validate_arg (len, INTEGER_TYPE))
9123 return NULL_TREE;
9124
9125 /* If the LEN parameter is zero, return DEST. */
9126 if (integer_zerop (len))
9127 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
9128
9129 /* We can't compare slen with len as constants below if len is not a
9130 constant. */
9131 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
9132 return NULL_TREE;
9133
9134 if (!slen)
9135 slen = c_strlen (src, 1);
9136
9137 /* Now, we must be passed a constant src ptr parameter. */
9138 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
9139 return NULL_TREE;
9140
9141 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
9142
9143 /* We do not support simplification of this case, though we do
9144 support it when expanding trees into RTL. */
9145 /* FIXME: generate a call to __builtin_memset. */
9146 if (tree_int_cst_lt (slen, len))
9147 return NULL_TREE;
9148
9149 /* OK, transform into builtin memcpy. */
9150 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
9151 if (!fn)
9152 return NULL_TREE;
9153
9154 len = fold_convert_loc (loc, size_type_node, len);
9155 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
9156 build_call_expr_loc (loc, fn, 3, dest, src, len));
9157 }
9158
9159 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
9160 arguments to the call, and TYPE is its return type.
9161 Return NULL_TREE if no simplification can be made. */
9162
9163 static tree
9164 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
9165 {
9166 if (!validate_arg (arg1, POINTER_TYPE)
9167 || !validate_arg (arg2, INTEGER_TYPE)
9168 || !validate_arg (len, INTEGER_TYPE))
9169 return NULL_TREE;
9170 else
9171 {
9172 const char *p1;
9173
9174 if (TREE_CODE (arg2) != INTEGER_CST
9175 || !host_integerp (len, 1))
9176 return NULL_TREE;
9177
9178 p1 = c_getstr (arg1);
9179 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
9180 {
9181 char c;
9182 const char *r;
9183 tree tem;
9184
9185 if (target_char_cast (arg2, &c))
9186 return NULL_TREE;
9187
9188 r = (const char *) memchr (p1, c, tree_low_cst (len, 1));
9189
9190 if (r == NULL)
9191 return build_int_cst (TREE_TYPE (arg1), 0);
9192
9193 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
9194 return fold_convert_loc (loc, type, tem);
9195 }
9196 return NULL_TREE;
9197 }
9198 }
9199
9200 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
9201 Return NULL_TREE if no simplification can be made. */
9202
9203 static tree
9204 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
9205 {
9206 const char *p1, *p2;
9207
9208 if (!validate_arg (arg1, POINTER_TYPE)
9209 || !validate_arg (arg2, POINTER_TYPE)
9210 || !validate_arg (len, INTEGER_TYPE))
9211 return NULL_TREE;
9212
9213 /* If the LEN parameter is zero, return zero. */
9214 if (integer_zerop (len))
9215 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
9216 arg1, arg2);
9217
9218 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9219 if (operand_equal_p (arg1, arg2, 0))
9220 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
9221
9222 p1 = c_getstr (arg1);
9223 p2 = c_getstr (arg2);
9224
9225 /* If all arguments are constant, and the value of len is not greater
9226 than the lengths of arg1 and arg2, evaluate at compile-time. */
9227 if (host_integerp (len, 1) && p1 && p2
9228 && compare_tree_int (len, strlen (p1) + 1) <= 0
9229 && compare_tree_int (len, strlen (p2) + 1) <= 0)
9230 {
9231 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
9232
9233 if (r > 0)
9234 return integer_one_node;
9235 else if (r < 0)
9236 return integer_minus_one_node;
9237 else
9238 return integer_zero_node;
9239 }
9240
9241 /* If len parameter is one, return an expression corresponding to
9242 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
9243 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9244 {
9245 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9246 tree cst_uchar_ptr_node
9247 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9248
9249 tree ind1
9250 = fold_convert_loc (loc, integer_type_node,
9251 build1 (INDIRECT_REF, cst_uchar_node,
9252 fold_convert_loc (loc,
9253 cst_uchar_ptr_node,
9254 arg1)));
9255 tree ind2
9256 = fold_convert_loc (loc, integer_type_node,
9257 build1 (INDIRECT_REF, cst_uchar_node,
9258 fold_convert_loc (loc,
9259 cst_uchar_ptr_node,
9260 arg2)));
9261 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9262 }
9263
9264 return NULL_TREE;
9265 }
9266
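/* Editorial aside: a minimal sketch, not part of GCC, kept under #if 0.
   It shows the one-byte memcmp fold above. */
#if 0
#include <string.h>

int
folded_memcmp (const void *a, const void *b)
{
  /* Folds to:
     *(const unsigned char *) a - *(const unsigned char *) b  */
  return memcmp (a, b, 1);
}
#endif
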
9267 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
9268 Return NULL_TREE if no simplification can be made. */
9269
9270 static tree
9271 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
9272 {
9273 const char *p1, *p2;
9274
9275 if (!validate_arg (arg1, POINTER_TYPE)
9276 || !validate_arg (arg2, POINTER_TYPE))
9277 return NULL_TREE;
9278
9279 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9280 if (operand_equal_p (arg1, arg2, 0))
9281 return integer_zero_node;
9282
9283 p1 = c_getstr (arg1);
9284 p2 = c_getstr (arg2);
9285
9286 if (p1 && p2)
9287 {
9288 const int i = strcmp (p1, p2);
9289 if (i < 0)
9290 return integer_minus_one_node;
9291 else if (i > 0)
9292 return integer_one_node;
9293 else
9294 return integer_zero_node;
9295 }
9296
9297 /* If the second arg is "", return *(const unsigned char*)arg1. */
9298 if (p2 && *p2 == '\0')
9299 {
9300 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9301 tree cst_uchar_ptr_node
9302 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9303
9304 return fold_convert_loc (loc, integer_type_node,
9305 build1 (INDIRECT_REF, cst_uchar_node,
9306 fold_convert_loc (loc,
9307 cst_uchar_ptr_node,
9308 arg1)));
9309 }
9310
9311 /* If the first arg is "", return -*(const unsigned char*)arg2. */
9312 if (p1 && *p1 == '\0')
9313 {
9314 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9315 tree cst_uchar_ptr_node
9316 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9317
9318 tree temp
9319 = fold_convert_loc (loc, integer_type_node,
9320 build1 (INDIRECT_REF, cst_uchar_node,
9321 fold_convert_loc (loc,
9322 cst_uchar_ptr_node,
9323 arg2)));
9324 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
9325 }
9326
9327 return NULL_TREE;
9328 }
9329
9330 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
9331 Return NULL_TREE if no simplification can be made. */
9332
9333 static tree
9334 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
9335 {
9336 const char *p1, *p2;
9337
9338 if (!validate_arg (arg1, POINTER_TYPE)
9339 || !validate_arg (arg2, POINTER_TYPE)
9340 || !validate_arg (len, INTEGER_TYPE))
9341 return NULL_TREE;
9342
9343 /* If the LEN parameter is zero, return zero. */
9344 if (integer_zerop (len))
9345 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
9346 arg1, arg2);
9347
9348 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9349 if (operand_equal_p (arg1, arg2, 0))
9350 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
9351
9352 p1 = c_getstr (arg1);
9353 p2 = c_getstr (arg2);
9354
9355 if (host_integerp (len, 1) && p1 && p2)
9356 {
9357 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
9358 if (i > 0)
9359 return integer_one_node;
9360 else if (i < 0)
9361 return integer_minus_one_node;
9362 else
9363 return integer_zero_node;
9364 }
9365
9366 /* If the second arg is "", and the length is greater than zero,
9367 return *(const unsigned char*)arg1. */
9368 if (p2 && *p2 == '\0'
9369 && TREE_CODE (len) == INTEGER_CST
9370 && tree_int_cst_sgn (len) == 1)
9371 {
9372 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9373 tree cst_uchar_ptr_node
9374 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9375
9376 return fold_convert_loc (loc, integer_type_node,
9377 build1 (INDIRECT_REF, cst_uchar_node,
9378 fold_convert_loc (loc,
9379 cst_uchar_ptr_node,
9380 arg1)));
9381 }
9382
9383 /* If the first arg is "", and the length is greater than zero,
9384 return -*(const unsigned char*)arg2. */
9385 if (p1 && *p1 == '\0'
9386 && TREE_CODE (len) == INTEGER_CST
9387 && tree_int_cst_sgn (len) == 1)
9388 {
9389 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9390 tree cst_uchar_ptr_node
9391 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9392
9393 tree temp = fold_convert_loc (loc, integer_type_node,
9394 build1 (INDIRECT_REF, cst_uchar_node,
9395 fold_convert_loc (loc,
9396 cst_uchar_ptr_node,
9397 arg2)));
9398 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
9399 }
9400
9401 /* If len parameter is one, return an expression corresponding to
9402 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
9403 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9404 {
9405 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9406 tree cst_uchar_ptr_node
9407 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9408
9409 tree ind1 = fold_convert_loc (loc, integer_type_node,
9410 build1 (INDIRECT_REF, cst_uchar_node,
9411 fold_convert_loc (loc,
9412 cst_uchar_ptr_node,
9413 arg1)));
9414 tree ind2 = fold_convert_loc (loc, integer_type_node,
9415 build1 (INDIRECT_REF, cst_uchar_node,
9416 fold_convert_loc (loc,
9417 cst_uchar_ptr_node,
9418 arg2)));
9419 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9420 }
9421
9422 return NULL_TREE;
9423 }
9424
9425 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9426 ARG. Return NULL_TREE if no simplification can be made. */
9427
9428 static tree
9429 fold_builtin_signbit (location_t loc, tree arg, tree type)
9430 {
9431 if (!validate_arg (arg, REAL_TYPE))
9432 return NULL_TREE;
9433
9434 /* If ARG is a compile-time constant, determine the result. */
9435 if (TREE_CODE (arg) == REAL_CST
9436 && !TREE_OVERFLOW (arg))
9437 {
9438 REAL_VALUE_TYPE c;
9439
9440 c = TREE_REAL_CST (arg);
9441 return (REAL_VALUE_NEGATIVE (c)
9442 ? build_one_cst (type)
9443 : build_zero_cst (type));
9444 }
9445
9446 /* If ARG is non-negative, the result is always zero. */
9447 if (tree_expr_nonnegative_p (arg))
9448 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9449
9450 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9451 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9452 return fold_convert (type,
9453 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
9454 build_real (TREE_TYPE (arg), dconst0)));
9455
9456 return NULL_TREE;
9457 }
9458
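/* Editorial aside: an illustrative sketch, not part of GCC, kept under
   #if 0. It shows the final signbit fold above, which applies only to
   formats without signed zeros; for IEEE modes it is skipped, because
   signbit(-0.0) must be nonzero while -0.0 < 0.0 is false. */
#if 0
int
folded_signbit (double x)
{
  /* Only when !HONOR_SIGNED_ZEROS for double's mode does this fold
     to (x < 0.0); constants and provably nonnegative arguments fold
     earlier regardless. */
  return __builtin_signbit (x);
}
#endif
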
9459 /* Fold function call to builtin copysign, copysignf or copysignl with
9460 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9461 be made. */
9462
9463 static tree
9464 fold_builtin_copysign (location_t loc, tree fndecl,
9465 tree arg1, tree arg2, tree type)
9466 {
9467 tree tem;
9468
9469 if (!validate_arg (arg1, REAL_TYPE)
9470 || !validate_arg (arg2, REAL_TYPE))
9471 return NULL_TREE;
9472
9473 /* copysign(X,X) is X. */
9474 if (operand_equal_p (arg1, arg2, 0))
9475 return fold_convert_loc (loc, type, arg1);
9476
9477 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9478 if (TREE_CODE (arg1) == REAL_CST
9479 && TREE_CODE (arg2) == REAL_CST
9480 && !TREE_OVERFLOW (arg1)
9481 && !TREE_OVERFLOW (arg2))
9482 {
9483 REAL_VALUE_TYPE c1, c2;
9484
9485 c1 = TREE_REAL_CST (arg1);
9486 c2 = TREE_REAL_CST (arg2);
9487 /* c1.sign := c2.sign. */
9488 real_copysign (&c1, &c2);
9489 return build_real (type, c1);
9490 }
9491
9492 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9493 Remember to evaluate Y for side-effects. */
9494 if (tree_expr_nonnegative_p (arg2))
9495 return omit_one_operand_loc (loc, type,
9496 fold_build1_loc (loc, ABS_EXPR, type, arg1),
9497 arg2);
9498
9499 /* Strip sign changing operations for the first argument. */
9500 tem = fold_strip_sign_ops (arg1);
9501 if (tem)
9502 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
9503
9504 return NULL_TREE;
9505 }
9506
9507 /* Fold a call to builtin isascii with argument ARG. */
9508
9509 static tree
9510 fold_builtin_isascii (location_t loc, tree arg)
9511 {
9512 if (!validate_arg (arg, INTEGER_TYPE))
9513 return NULL_TREE;
9514 else
9515 {
9516 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9517 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9518 build_int_cst (integer_type_node,
9519 ~ (unsigned HOST_WIDE_INT) 0x7f));
9520 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9521 arg, integer_zero_node);
9522 }
9523 }
9524
9525 /* Fold a call to builtin toascii with argument ARG. */
9526
9527 static tree
9528 fold_builtin_toascii (location_t loc, tree arg)
9529 {
9530 if (!validate_arg (arg, INTEGER_TYPE))
9531 return NULL_TREE;
9532
9533 /* Transform toascii(c) -> (c & 0x7f). */
9534 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9535 build_int_cst (integer_type_node, 0x7f));
9536 }
9537
9538 /* Fold a call to builtin isdigit with argument ARG. */
9539
9540 static tree
9541 fold_builtin_isdigit (location_t loc, tree arg)
9542 {
9543 if (!validate_arg (arg, INTEGER_TYPE))
9544 return NULL_TREE;
9545 else
9546 {
9547 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9548 /* According to the C standard, isdigit is unaffected by locale.
9549 However, it definitely is affected by the target character set. */
9550 unsigned HOST_WIDE_INT target_digit0
9551 = lang_hooks.to_target_charset ('0');
9552
9553 if (target_digit0 == 0)
9554 return NULL_TREE;
9555
9556 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9557 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9558 build_int_cst (unsigned_type_node, target_digit0));
9559 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9560 build_int_cst (unsigned_type_node, 9));
9561 }
9562 }
9563
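/* Editorial aside: a minimal sketch, not part of GCC, kept under #if 0.
   It spells out the isdigit transform above: the unsigned subtraction
   turns the two-sided check '0' <= c && c <= '9' into a single compare,
   because values below '0' wrap around to huge unsigned numbers. */
#if 0
int
folded_isdigit (int c)
{
  /* __builtin_isdigit (c) folds to this branch-free form. */
  return (unsigned) c - '0' <= 9;
}
#endif
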
9564 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9565
9566 static tree
9567 fold_builtin_fabs (location_t loc, tree arg, tree type)
9568 {
9569 if (!validate_arg (arg, REAL_TYPE))
9570 return NULL_TREE;
9571
9572 arg = fold_convert_loc (loc, type, arg);
9573 if (TREE_CODE (arg) == REAL_CST)
9574 return fold_abs_const (arg, type);
9575 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9576 }
9577
9578 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9579
9580 static tree
9581 fold_builtin_abs (location_t loc, tree arg, tree type)
9582 {
9583 if (!validate_arg (arg, INTEGER_TYPE))
9584 return NULL_TREE;
9585
9586 arg = fold_convert_loc (loc, type, arg);
9587 if (TREE_CODE (arg) == INTEGER_CST)
9588 return fold_abs_const (arg, type);
9589 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9590 }
9591
9592 /* Fold a fma operation with arguments ARG[012]. */
9593
9594 tree
9595 fold_fma (location_t loc ATTRIBUTE_UNUSED,
9596 tree type, tree arg0, tree arg1, tree arg2)
9597 {
9598 if (TREE_CODE (arg0) == REAL_CST
9599 && TREE_CODE (arg1) == REAL_CST
9600 && TREE_CODE (arg2) == REAL_CST)
9601 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
9602
9603 return NULL_TREE;
9604 }
9605
9606 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
9607
9608 static tree
9609 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
9610 {
9611 if (validate_arg (arg0, REAL_TYPE)
9612 && validate_arg (arg1, REAL_TYPE)
9613 && validate_arg (arg2, REAL_TYPE))
9614 {
9615 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
9616 if (tem)
9617 return tem;
9618
9619 /* ??? Only expand to FMA_EXPR if it's directly supported. */
9620 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
9621 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
9622 }
9623 return NULL_TREE;
9624 }
9625
9626 /* Fold a call to builtin fmin or fmax. */
9627
9628 static tree
9629 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
9630 tree type, bool max)
9631 {
9632 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9633 {
9634 /* Calculate the result when the argument is a constant. */
9635 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9636
9637 if (res)
9638 return res;
9639
9640 /* If either argument is NaN, return the other one. Avoid the
9641 transformation if we get (and honor) a signalling NaN. Using
9642 omit_one_operand() ensures we create a non-lvalue. */
9643 if (TREE_CODE (arg0) == REAL_CST
9644 && real_isnan (&TREE_REAL_CST (arg0))
9645 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9646 || ! TREE_REAL_CST (arg0).signalling))
9647 return omit_one_operand_loc (loc, type, arg1, arg0);
9648 if (TREE_CODE (arg1) == REAL_CST
9649 && real_isnan (&TREE_REAL_CST (arg1))
9650 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9651 || ! TREE_REAL_CST (arg1).signalling))
9652 return omit_one_operand_loc (loc, type, arg0, arg1);
9653
9654 /* Transform fmin/fmax(x,x) -> x. */
9655 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9656 return omit_one_operand_loc (loc, type, arg0, arg1);
9657
9658 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9659 functions to return the numeric arg if the other one is NaN.
9660 These tree codes don't honor that, so only transform if
9661 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9662 handled, so we don't have to worry about it either. */
9663 if (flag_finite_math_only)
9664 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9665 fold_convert_loc (loc, type, arg0),
9666 fold_convert_loc (loc, type, arg1));
9667 }
9668 return NULL_TREE;
9669 }
9670
9671 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9672
9673 static tree
9674 fold_builtin_carg (location_t loc, tree arg, tree type)
9675 {
9676 if (validate_arg (arg, COMPLEX_TYPE)
9677 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9678 {
9679 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9680
9681 if (atan2_fn)
9682 {
9683 tree new_arg = builtin_save_expr (arg);
9684 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9685 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9686 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9687 }
9688 }
9689
9690 return NULL_TREE;
9691 }
9692
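/* Editorial aside: a minimal sketch, not part of GCC, kept under #if 0.
   It shows the carg fold above at the source level. */
#if 0
#include <complex.h>
#include <math.h>

double
folded_carg (double complex z)
{
  /* Folds to: atan2 (cimag (z), creal (z)), with z wrapped in a
     SAVE_EXPR so any side effects are evaluated only once. */
  return carg (z);
}
#endif
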
9693 /* Fold a call to builtin logb/ilogb. */
9694
9695 static tree
9696 fold_builtin_logb (location_t loc, tree arg, tree rettype)
9697 {
9698 if (! validate_arg (arg, REAL_TYPE))
9699 return NULL_TREE;
9700
9701 STRIP_NOPS (arg);
9702
9703 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9704 {
9705 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9706
9707 switch (value->cl)
9708 {
9709 case rvc_nan:
9710 case rvc_inf:
9711 /* If arg is Inf or NaN and we're logb, return it. */
9712 if (TREE_CODE (rettype) == REAL_TYPE)
9713 {
9714 /* For logb(-Inf) we have to return +Inf. */
9715 if (real_isinf (value) && real_isneg (value))
9716 {
9717 REAL_VALUE_TYPE tem;
9718 real_inf (&tem);
9719 return build_real (rettype, tem);
9720 }
9721 return fold_convert_loc (loc, rettype, arg);
9722 }
9723 /* Fall through... */
9724 case rvc_zero:
9725 /* Zero may set errno and/or raise an exception for logb; also,
9726 for ilogb we don't know FP_ILOGB0. */
9727 return NULL_TREE;
9728 case rvc_normal:
9729 /* For normal numbers, proceed iff radix == 2. In GCC,
9730 normalized significands are in the range [0.5, 1.0). We
9731 want the exponent as if they were [1.0, 2.0) so get the
9732 exponent and subtract 1. */
9733 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9734 return fold_convert_loc (loc, rettype,
9735 build_int_cst (integer_type_node,
9736 REAL_EXP (value)-1));
9737 break;
9738 }
9739 }
9740
9741 return NULL_TREE;
9742 }
9743
9744 /* Fold a call to builtin significand, if radix == 2. */
9745
9746 static tree
9747 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9748 {
9749 if (! validate_arg (arg, REAL_TYPE))
9750 return NULL_TREE;
9751
9752 STRIP_NOPS (arg);
9753
9754 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9755 {
9756 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9757
9758 switch (value->cl)
9759 {
9760 case rvc_zero:
9761 case rvc_nan:
9762 case rvc_inf:
9763 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9764 return fold_convert_loc (loc, rettype, arg);
9765 case rvc_normal:
9766 /* For normal numbers, proceed iff radix == 2. */
9767 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9768 {
9769 REAL_VALUE_TYPE result = *value;
9770 /* In GCC, normalized significands are in the range [0.5,
9771 1.0). We want them to be [1.0, 2.0) so set the
9772 exponent to 1. */
9773 SET_REAL_EXP (&result, 1);
9774 return build_real (rettype, result);
9775 }
9776 break;
9777 }
9778 }
9779
9780 return NULL_TREE;
9781 }
9782
9783 /* Fold a call to builtin frexp; we can assume the base is 2. */
9784
9785 static tree
9786 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9787 {
9788 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9789 return NULL_TREE;
9790
9791 STRIP_NOPS (arg0);
9792
9793 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9794 return NULL_TREE;
9795
9796 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9797
9798 /* Proceed if a valid pointer type was passed in. */
9799 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9800 {
9801 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9802 tree frac, exp;
9803
9804 switch (value->cl)
9805 {
9806 case rvc_zero:
9807 /* For +-0, return (*exp = 0, +-0). */
9808 exp = integer_zero_node;
9809 frac = arg0;
9810 break;
9811 case rvc_nan:
9812 case rvc_inf:
9813 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9814 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9815 case rvc_normal:
9816 {
9817 /* Since the frexp function always expects base 2, and in
9818 GCC normalized significands are already in the range
9819 [0.5, 1.0), we have exactly what frexp wants. */
9820 REAL_VALUE_TYPE frac_rvt = *value;
9821 SET_REAL_EXP (&frac_rvt, 0);
9822 frac = build_real (rettype, frac_rvt);
9823 exp = build_int_cst (integer_type_node, REAL_EXP (value));
9824 }
9825 break;
9826 default:
9827 gcc_unreachable ();
9828 }
9829
9830 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9831 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9832 TREE_SIDE_EFFECTS (arg1) = 1;
9833 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9834 }
9835
9836 return NULL_TREE;
9837 }
9838
9839 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9840 then we can assume the base is two. If it's false, then we have to
9841 check the mode of the TYPE parameter in certain cases. */
9842
9843 static tree
9844 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9845 tree type, bool ldexp)
9846 {
9847 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9848 {
9849 STRIP_NOPS (arg0);
9850 STRIP_NOPS (arg1);
9851
9852 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9853 if (real_zerop (arg0) || integer_zerop (arg1)
9854 || (TREE_CODE (arg0) == REAL_CST
9855 && !real_isfinite (&TREE_REAL_CST (arg0))))
9856 return omit_one_operand_loc (loc, type, arg0, arg1);
9857
9858 /* If both arguments are constant, then try to evaluate it. */
9859 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9860 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9861 && host_integerp (arg1, 0))
9862 {
9863 /* Bound the maximum adjustment to twice the range of the
9864 mode's valid exponents. Use abs to ensure the range is
9865 positive as a sanity check. */
9866 const long max_exp_adj
9867 = 2 * labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9868 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9869
9870 /* Get the user-requested adjustment. */
9871 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9872
9873 /* The requested adjustment must be inside this range. This
9874 is a preliminary cap to avoid things like overflow, we
9875 may still fail to compute the result for other reasons. */
9876 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9877 {
9878 REAL_VALUE_TYPE initial_result;
9879
9880 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9881
9882 /* Ensure we didn't overflow. */
9883 if (! real_isinf (&initial_result))
9884 {
9885 const REAL_VALUE_TYPE trunc_result
9886 = real_value_truncate (TYPE_MODE (type), initial_result);
9887
9888 /* Only proceed if the target mode can hold the
9889 resulting value. */
9890 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9891 return build_real (type, trunc_result);
9892 }
9893 }
9894 }
9895 }
9896
9897 return NULL_TREE;
9898 }
9899
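/* Editorial aside: an illustrative sketch, not part of GCC, kept under
   #if 0. It works the exponent cap above through for IEEE double, where
   emax = 1024 and emin = -1021. */
#if 0
double
folded_ldexp (void)
{
  /* max_exp_adj = 2 * (1024 - -1021) = 4090; the requested adjustment
     10 is inside the cap, real_ldexp gives a finite value, and the
     truncated result is exact, so this folds to 1024.0. */
  return __builtin_ldexp (1.0, 10);
}
#endif
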
9900 /* Fold a call to builtin modf. */
9901
9902 static tree
9903 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9904 {
9905 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9906 return NULL_TREE;
9907
9908 STRIP_NOPS (arg0);
9909
9910 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9911 return NULL_TREE;
9912
9913 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9914
9915 /* Proceed if a valid pointer type was passed in. */
9916 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9917 {
9918 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9919 REAL_VALUE_TYPE trunc, frac;
9920
9921 switch (value->cl)
9922 {
9923 case rvc_nan:
9924 case rvc_zero:
9925 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9926 trunc = frac = *value;
9927 break;
9928 case rvc_inf:
9929 /* For +-Inf, return (*arg1 = arg0, +-0). */
9930 frac = dconst0;
9931 frac.sign = value->sign;
9932 trunc = *value;
9933 break;
9934 case rvc_normal:
9935 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9936 real_trunc (&trunc, VOIDmode, value);
9937 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9938 /* If the original number was negative and already
9939 integral, then the fractional part is -0.0. */
9940 if (value->sign && frac.cl == rvc_zero)
9941 frac.sign = value->sign;
9942 break;
9943 }
9944
9945 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9946 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9947 build_real (rettype, trunc));
9948 TREE_SIDE_EFFECTS (arg1) = 1;
9949 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9950 build_real (rettype, frac));
9951 }
9952
9953 return NULL_TREE;
9954 }
9955
9956 /* Given a location LOC, an interclass builtin function decl FNDECL
9957 and its single argument ARG, return a folded expression computing
9958 the same, or NULL_TREE if we either couldn't or didn't want to fold
9959 (the latter happens if there's an RTL instruction available). */
9960
9961 static tree
9962 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9963 {
9964 enum machine_mode mode;
9965
9966 if (!validate_arg (arg, REAL_TYPE))
9967 return NULL_TREE;
9968
9969 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9970 return NULL_TREE;
9971
9972 mode = TYPE_MODE (TREE_TYPE (arg));
9973
9974 /* If there is no optab, try generic code. */
9975 switch (DECL_FUNCTION_CODE (fndecl))
9976 {
9977 tree result;
9978
9979 CASE_FLT_FN (BUILT_IN_ISINF):
9980 {
9981 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9982 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9983 tree const type = TREE_TYPE (arg);
9984 REAL_VALUE_TYPE r;
9985 char buf[128];
9986
9987 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9988 real_from_string (&r, buf);
9989 result = build_call_expr (isgr_fn, 2,
9990 fold_build1_loc (loc, ABS_EXPR, type, arg),
9991 build_real (type, r));
9992 return result;
9993 }
9994 CASE_FLT_FN (BUILT_IN_FINITE):
9995 case BUILT_IN_ISFINITE:
9996 {
9997 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9998 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9999 tree const type = TREE_TYPE (arg);
10000 REAL_VALUE_TYPE r;
10001 char buf[128];
10002
10003 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
10004 real_from_string (&r, buf);
10005 result = build_call_expr (isle_fn, 2,
10006 fold_build1_loc (loc, ABS_EXPR, type, arg),
10007 build_real (type, r));
10008 /*result = fold_build2_loc (loc, UNGT_EXPR,
10009 TREE_TYPE (TREE_TYPE (fndecl)),
10010 fold_build1_loc (loc, ABS_EXPR, type, arg),
10011 build_real (type, r));
10012 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
10013 TREE_TYPE (TREE_TYPE (fndecl)),
10014 result);*/
10015 return result;
10016 }
10017 case BUILT_IN_ISNORMAL:
10018 {
10019 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
10020 islessequal(fabs(x),DBL_MAX). */
10021 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
10022 tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
10023 tree const type = TREE_TYPE (arg);
10024 REAL_VALUE_TYPE rmax, rmin;
10025 char buf[128];
10026
10027 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
10028 real_from_string (&rmax, buf);
10029 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
10030 real_from_string (&rmin, buf);
10031 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
10032 result = build_call_expr (isle_fn, 2, arg,
10033 build_real (type, rmax));
10034 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
10035 build_call_expr (isge_fn, 2, arg,
10036 build_real (type, rmin)));
10037 return result;
10038 }
10039 default:
10040 break;
10041 }
10042
10043 return NULL_TREE;
10044 }
10045
10046 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
10047 ARG is the argument for the call. */
10048
10049 static tree
10050 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
10051 {
10052 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10053 REAL_VALUE_TYPE r;
10054
10055 if (!validate_arg (arg, REAL_TYPE))
10056 return NULL_TREE;
10057
10058 switch (builtin_index)
10059 {
10060 case BUILT_IN_ISINF:
10061 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
10062 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
10063
10064 if (TREE_CODE (arg) == REAL_CST)
10065 {
10066 r = TREE_REAL_CST (arg);
10067 if (real_isinf (&r))
10068 return real_compare (GT_EXPR, &r, &dconst0)
10069 ? integer_one_node : integer_minus_one_node;
10070 else
10071 return integer_zero_node;
10072 }
10073
10074 return NULL_TREE;
10075
10076 case BUILT_IN_ISINF_SIGN:
10077 {
10078 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
10079 /* In a boolean context, GCC will fold the inner COND_EXPR to
10080 1. So e.g. "if (isinf_sign(x))" would be folded to just
10081 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
10082 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
10083 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
10084 tree tmp = NULL_TREE;
10085
10086 arg = builtin_save_expr (arg);
10087
10088 if (signbit_fn && isinf_fn)
10089 {
10090 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
10091 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
10092
10093 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
10094 signbit_call, integer_zero_node);
10095 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
10096 isinf_call, integer_zero_node);
10097
10098 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
10099 integer_minus_one_node, integer_one_node);
10100 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
10101 isinf_call, tmp,
10102 integer_zero_node);
10103 }
10104
10105 return tmp;
10106 }
10107
10108 case BUILT_IN_ISFINITE:
10109 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
10110 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
10111 return omit_one_operand_loc (loc, type, integer_one_node, arg);
10112
10113 if (TREE_CODE (arg) == REAL_CST)
10114 {
10115 r = TREE_REAL_CST (arg);
10116 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
10117 }
10118
10119 return NULL_TREE;
10120
10121 case BUILT_IN_ISNAN:
10122 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
10123 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
10124
10125 if (TREE_CODE (arg) == REAL_CST)
10126 {
10127 r = TREE_REAL_CST (arg);
10128 return real_isnan (&r) ? integer_one_node : integer_zero_node;
10129 }
10130
10131 arg = builtin_save_expr (arg);
10132 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
10133
10134 default:
10135 gcc_unreachable ();
10136 }
10137 }
10138
10139 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
10140 This builtin will generate code to return the appropriate floating
10141 point classification depending on the value of the floating point
10142 number passed in. The possible return values must be supplied as
10143 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
10144 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
10145 one floating-point argument, which is "type generic". */
10146
10147 static tree
10148 fold_builtin_fpclassify (location_t loc, tree exp)
10149 {
10150 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
10151 arg, type, res, tmp;
10152 enum machine_mode mode;
10153 REAL_VALUE_TYPE r;
10154 char buf[128];
10155
10156 /* Verify the required arguments in the original call. */
10157 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
10158 INTEGER_TYPE, INTEGER_TYPE,
10159 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
10160 return NULL_TREE;
10161
10162 fp_nan = CALL_EXPR_ARG (exp, 0);
10163 fp_infinite = CALL_EXPR_ARG (exp, 1);
10164 fp_normal = CALL_EXPR_ARG (exp, 2);
10165 fp_subnormal = CALL_EXPR_ARG (exp, 3);
10166 fp_zero = CALL_EXPR_ARG (exp, 4);
10167 arg = CALL_EXPR_ARG (exp, 5);
10168 type = TREE_TYPE (arg);
10169 mode = TYPE_MODE (type);
10170 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
10171
10172 /* fpclassify(x) ->
10173 isnan(x) ? FP_NAN :
10174 (fabs(x) == Inf ? FP_INFINITE :
10175 (fabs(x) >= DBL_MIN ? FP_NORMAL :
10176 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
10177
10178 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
10179 build_real (type, dconst0));
10180 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
10181 tmp, fp_zero, fp_subnormal);
10182
10183 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
10184 real_from_string (&r, buf);
10185 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
10186 arg, build_real (type, r));
10187 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
10188
10189 if (HONOR_INFINITIES (mode))
10190 {
10191 real_inf (&r);
10192 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
10193 build_real (type, r));
10194 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
10195 fp_infinite, res);
10196 }
10197
10198 if (HONOR_NANS (mode))
10199 {
10200 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
10201 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
10202 }
10203
10204 return res;
10205 }
10206
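/* Editorial aside: an illustrative sketch, not part of GCC, kept under
   #if 0. It writes out, as C, the nested conditional the folder builds
   above; 0x1p-1022 is the "0x1p%d" string with emin - 1 for IEEE
   double, i.e. DBL_MIN. */
#if 0
int
folded_fpclassify_shape (double x, int fp_nan, int fp_infinite,
                         int fp_normal, int fp_subnormal, int fp_zero)
{
  double ax = __builtin_fabs (x);
  return (x == x                        /* ORDERED_EXPR, i.e. !isnan */
          ? (ax == __builtin_inf () ? fp_infinite
             : (ax >= 0x1p-1022 ? fp_normal
                : (ax == 0.0 ? fp_zero : fp_subnormal)))
          : fp_nan);
}
#endif
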
10207 /* Fold a call to an unordered comparison function such as
10208 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
10209 being called and ARG0 and ARG1 are the arguments for the call.
10210 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
10211 the opposite of the desired result. UNORDERED_CODE is used
10212 for modes that can hold NaNs and ORDERED_CODE is used for
10213 the rest. */
10214
10215 static tree
10216 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
10217 enum tree_code unordered_code,
10218 enum tree_code ordered_code)
10219 {
10220 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10221 enum tree_code code;
10222 tree type0, type1;
10223 enum tree_code code0, code1;
10224 tree cmp_type = NULL_TREE;
10225
10226 type0 = TREE_TYPE (arg0);
10227 type1 = TREE_TYPE (arg1);
10228
10229 code0 = TREE_CODE (type0);
10230 code1 = TREE_CODE (type1);
10231
10232 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
10233 /* Choose the wider of two real types. */
10234 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
10235 ? type0 : type1;
10236 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
10237 cmp_type = type0;
10238 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
10239 cmp_type = type1;
10240
10241 arg0 = fold_convert_loc (loc, cmp_type, arg0);
10242 arg1 = fold_convert_loc (loc, cmp_type, arg1);
10243
10244 if (unordered_code == UNORDERED_EXPR)
10245 {
10246 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
10247 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
10248 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
10249 }
10250
10251 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
10252 : ordered_code;
10253 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
10254 fold_build2_loc (loc, code, type, arg0, arg1));
10255 }
10256
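/* Editorial aside: a minimal sketch, not part of GCC, kept under #if 0.
   It shows one instance of the unordered-comparison folds above. */
#if 0
int
folded_isgreater (double a, double b)
{
  /* For a mode honoring NaNs this folds to !UNLE_EXPR (a, b), i.e.
     "not (unordered or a <= b)"; for a NaN-free mode the plain
     LE_EXPR is negated instead. */
  return __builtin_isgreater (a, b);
}
#endif
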
10257 /* Fold a call to built-in function FNDECL with 0 arguments.
10258 IGNORE is true if the result of the function call is ignored. This
10259 function returns NULL_TREE if no simplification was possible. */
10260
10261 static tree
10262 fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
10263 {
10264 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10265 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10266 switch (fcode)
10267 {
10268 CASE_FLT_FN (BUILT_IN_INF):
10269 case BUILT_IN_INFD32:
10270 case BUILT_IN_INFD64:
10271 case BUILT_IN_INFD128:
10272 return fold_builtin_inf (loc, type, true);
10273
10274 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
10275 return fold_builtin_inf (loc, type, false);
10276
10277 case BUILT_IN_CLASSIFY_TYPE:
10278 return fold_builtin_classify_type (NULL_TREE);
10279
10280 default:
10281 break;
10282 }
10283 return NULL_TREE;
10284 }
10285
10286 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
10287 IGNORE is true if the result of the function call is ignored. This
10288 function returns NULL_TREE if no simplification was possible. */
10289
10290 static tree
10291 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
10292 {
10293 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10294 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10295 switch (fcode)
10296 {
10297 case BUILT_IN_CONSTANT_P:
10298 {
10299 tree val = fold_builtin_constant_p (arg0);
10300
10301 /* Gimplification will pull the CALL_EXPR for the builtin out of
10302 an if condition. When not optimizing, we'll not CSE it back.
10303 To avoid link-error regressions, return false now.
10304 if (!val && !optimize)
10305 val = integer_zero_node;
10306
10307 return val;
10308 }
10309
10310 case BUILT_IN_CLASSIFY_TYPE:
10311 return fold_builtin_classify_type (arg0);
10312
10313 case BUILT_IN_STRLEN:
10314 return fold_builtin_strlen (loc, type, arg0);
10315
10316 CASE_FLT_FN (BUILT_IN_FABS):
10317 return fold_builtin_fabs (loc, arg0, type);
10318
10319 case BUILT_IN_ABS:
10320 case BUILT_IN_LABS:
10321 case BUILT_IN_LLABS:
10322 case BUILT_IN_IMAXABS:
10323 return fold_builtin_abs (loc, arg0, type);
10324
10325 CASE_FLT_FN (BUILT_IN_CONJ):
10326 if (validate_arg (arg0, COMPLEX_TYPE)
10327 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10328 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
10329 break;
10330
10331 CASE_FLT_FN (BUILT_IN_CREAL):
10332 if (validate_arg (arg0, COMPLEX_TYPE)
10333 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10334 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
10335 break;
10336
10337 CASE_FLT_FN (BUILT_IN_CIMAG):
10338 if (validate_arg (arg0, COMPLEX_TYPE)
10339 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10340 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
10341 break;
10342
10343 CASE_FLT_FN (BUILT_IN_CCOS):
10344 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ false);
10345
10346 CASE_FLT_FN (BUILT_IN_CCOSH):
10347 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ true);
10348
10349 CASE_FLT_FN (BUILT_IN_CPROJ):
10350 return fold_builtin_cproj (loc, arg0, type);
10351
10352 CASE_FLT_FN (BUILT_IN_CSIN):
10353 if (validate_arg (arg0, COMPLEX_TYPE)
10354 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10355 return do_mpc_arg1 (arg0, type, mpc_sin);
10356 break;
10357
10358 CASE_FLT_FN (BUILT_IN_CSINH):
10359 if (validate_arg (arg0, COMPLEX_TYPE)
10360 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10361 return do_mpc_arg1 (arg0, type, mpc_sinh);
10362 break;
10363
10364 CASE_FLT_FN (BUILT_IN_CTAN):
10365 if (validate_arg (arg0, COMPLEX_TYPE)
10366 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10367 return do_mpc_arg1 (arg0, type, mpc_tan);
10368 break;
10369
10370 CASE_FLT_FN (BUILT_IN_CTANH):
10371 if (validate_arg (arg0, COMPLEX_TYPE)
10372 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10373 return do_mpc_arg1 (arg0, type, mpc_tanh);
10374 break;
10375
10376 CASE_FLT_FN (BUILT_IN_CLOG):
10377 if (validate_arg (arg0, COMPLEX_TYPE)
10378 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10379 return do_mpc_arg1 (arg0, type, mpc_log);
10380 break;
10381
10382 CASE_FLT_FN (BUILT_IN_CSQRT):
10383 if (validate_arg (arg0, COMPLEX_TYPE)
10384 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10385 return do_mpc_arg1 (arg0, type, mpc_sqrt);
10386 break;
10387
10388 CASE_FLT_FN (BUILT_IN_CASIN):
10389 if (validate_arg (arg0, COMPLEX_TYPE)
10390 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10391 return do_mpc_arg1 (arg0, type, mpc_asin);
10392 break;
10393
10394 CASE_FLT_FN (BUILT_IN_CACOS):
10395 if (validate_arg (arg0, COMPLEX_TYPE)
10396 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10397 return do_mpc_arg1 (arg0, type, mpc_acos);
10398 break;
10399
10400 CASE_FLT_FN (BUILT_IN_CATAN):
10401 if (validate_arg (arg0, COMPLEX_TYPE)
10402 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10403 return do_mpc_arg1 (arg0, type, mpc_atan);
10404 break;
10405
10406 CASE_FLT_FN (BUILT_IN_CASINH):
10407 if (validate_arg (arg0, COMPLEX_TYPE)
10408 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10409 return do_mpc_arg1 (arg0, type, mpc_asinh);
10410 break;
10411
10412 CASE_FLT_FN (BUILT_IN_CACOSH):
10413 if (validate_arg (arg0, COMPLEX_TYPE)
10414 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10415 return do_mpc_arg1 (arg0, type, mpc_acosh);
10416 break;
10417
10418 CASE_FLT_FN (BUILT_IN_CATANH):
10419 if (validate_arg (arg0, COMPLEX_TYPE)
10420 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10421 return do_mpc_arg1 (arg0, type, mpc_atanh);
10422 break;
10423
10424 CASE_FLT_FN (BUILT_IN_CABS):
10425 return fold_builtin_cabs (loc, arg0, type, fndecl);
10426
10427 CASE_FLT_FN (BUILT_IN_CARG):
10428 return fold_builtin_carg (loc, arg0, type);
10429
10430 CASE_FLT_FN (BUILT_IN_SQRT):
10431 return fold_builtin_sqrt (loc, arg0, type);
10432
10433 CASE_FLT_FN (BUILT_IN_CBRT):
10434 return fold_builtin_cbrt (loc, arg0, type);
10435
10436 CASE_FLT_FN (BUILT_IN_ASIN):
10437 if (validate_arg (arg0, REAL_TYPE))
10438 return do_mpfr_arg1 (arg0, type, mpfr_asin,
10439 &dconstm1, &dconst1, true);
10440 break;
10441
10442 CASE_FLT_FN (BUILT_IN_ACOS):
10443 if (validate_arg (arg0, REAL_TYPE))
10444 return do_mpfr_arg1 (arg0, type, mpfr_acos,
10445 &dconstm1, &dconst1, true);
10446 break;
10447
10448 CASE_FLT_FN (BUILT_IN_ATAN):
10449 if (validate_arg (arg0, REAL_TYPE))
10450 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
10451 break;
10452
10453 CASE_FLT_FN (BUILT_IN_ASINH):
10454 if (validate_arg (arg0, REAL_TYPE))
10455 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10456 break;
10457
10458 CASE_FLT_FN (BUILT_IN_ACOSH):
10459 if (validate_arg (arg0, REAL_TYPE))
10460 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10461 &dconst1, NULL, true);
10462 break;
10463
10464 CASE_FLT_FN (BUILT_IN_ATANH):
10465 if (validate_arg (arg0, REAL_TYPE))
10466 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10467 &dconstm1, &dconst1, false);
10468 break;
10469
10470 CASE_FLT_FN (BUILT_IN_SIN):
10471 if (validate_arg (arg0, REAL_TYPE))
10472 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10473 break;
10474
10475 CASE_FLT_FN (BUILT_IN_COS):
10476 return fold_builtin_cos (loc, arg0, type, fndecl);
10477
10478 CASE_FLT_FN (BUILT_IN_TAN):
10479 return fold_builtin_tan (arg0, type);
10480
10481 CASE_FLT_FN (BUILT_IN_CEXP):
10482 return fold_builtin_cexp (loc, arg0, type);
10483
10484 CASE_FLT_FN (BUILT_IN_CEXPI):
10485 if (validate_arg (arg0, REAL_TYPE))
10486 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10487 break;
10488
10489 CASE_FLT_FN (BUILT_IN_SINH):
10490 if (validate_arg (arg0, REAL_TYPE))
10491 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10492 break;
10493
10494 CASE_FLT_FN (BUILT_IN_COSH):
10495 return fold_builtin_cosh (loc, arg0, type, fndecl);
10496
10497 CASE_FLT_FN (BUILT_IN_TANH):
10498 if (validate_arg (arg0, REAL_TYPE))
10499 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10500 break;
10501
10502 CASE_FLT_FN (BUILT_IN_ERF):
10503 if (validate_arg (arg0, REAL_TYPE))
10504 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10505 break;
10506
10507 CASE_FLT_FN (BUILT_IN_ERFC):
10508 if (validate_arg (arg0, REAL_TYPE))
10509 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10510 break;
10511
10512 CASE_FLT_FN (BUILT_IN_TGAMMA):
10513 if (validate_arg (arg0, REAL_TYPE))
10514 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10515 break;
10516
10517 CASE_FLT_FN (BUILT_IN_EXP):
10518 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
10519
10520 CASE_FLT_FN (BUILT_IN_EXP2):
10521 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
10522
10523 CASE_FLT_FN (BUILT_IN_EXP10):
10524 CASE_FLT_FN (BUILT_IN_POW10):
10525 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
10526
10527 CASE_FLT_FN (BUILT_IN_EXPM1):
10528 if (validate_arg (arg0, REAL_TYPE))
10529 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10530 break;
10531
10532 CASE_FLT_FN (BUILT_IN_LOG):
10533 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
10534
10535 CASE_FLT_FN (BUILT_IN_LOG2):
10536 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
10537
10538 CASE_FLT_FN (BUILT_IN_LOG10):
10539 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
10540
10541 CASE_FLT_FN (BUILT_IN_LOG1P):
10542 if (validate_arg (arg0, REAL_TYPE))
10543 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10544 &dconstm1, NULL, false);
10545 break;
10546
10547 CASE_FLT_FN (BUILT_IN_J0):
10548 if (validate_arg (arg0, REAL_TYPE))
10549 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10550 NULL, NULL, 0);
10551 break;
10552
10553 CASE_FLT_FN (BUILT_IN_J1):
10554 if (validate_arg (arg0, REAL_TYPE))
10555 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10556 NULL, NULL, 0);
10557 break;
10558
10559 CASE_FLT_FN (BUILT_IN_Y0):
10560 if (validate_arg (arg0, REAL_TYPE))
10561 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10562 &dconst0, NULL, false);
10563 break;
10564
10565 CASE_FLT_FN (BUILT_IN_Y1):
10566 if (validate_arg (arg0, REAL_TYPE))
10567 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10568 &dconst0, NULL, false);
10569 break;
10570
10571 CASE_FLT_FN (BUILT_IN_NAN):
10572 case BUILT_IN_NAND32:
10573 case BUILT_IN_NAND64:
10574 case BUILT_IN_NAND128:
10575 return fold_builtin_nan (arg0, type, true);
10576
10577 CASE_FLT_FN (BUILT_IN_NANS):
10578 return fold_builtin_nan (arg0, type, false);
10579
10580 CASE_FLT_FN (BUILT_IN_FLOOR):
10581 return fold_builtin_floor (loc, fndecl, arg0);
10582
10583 CASE_FLT_FN (BUILT_IN_CEIL):
10584 return fold_builtin_ceil (loc, fndecl, arg0);
10585
10586 CASE_FLT_FN (BUILT_IN_TRUNC):
10587 return fold_builtin_trunc (loc, fndecl, arg0);
10588
10589 CASE_FLT_FN (BUILT_IN_ROUND):
10590 return fold_builtin_round (loc, fndecl, arg0);
10591
10592 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10593 CASE_FLT_FN (BUILT_IN_RINT):
10594 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
10595
10596 CASE_FLT_FN (BUILT_IN_ICEIL):
10597 CASE_FLT_FN (BUILT_IN_LCEIL):
10598 CASE_FLT_FN (BUILT_IN_LLCEIL):
10599 CASE_FLT_FN (BUILT_IN_LFLOOR):
10600 CASE_FLT_FN (BUILT_IN_IFLOOR):
10601 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10602 CASE_FLT_FN (BUILT_IN_IROUND):
10603 CASE_FLT_FN (BUILT_IN_LROUND):
10604 CASE_FLT_FN (BUILT_IN_LLROUND):
10605 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
10606
10607 CASE_FLT_FN (BUILT_IN_IRINT):
10608 CASE_FLT_FN (BUILT_IN_LRINT):
10609 CASE_FLT_FN (BUILT_IN_LLRINT):
10610 return fold_fixed_mathfn (loc, fndecl, arg0);
10611
10612 case BUILT_IN_BSWAP16:
10613 case BUILT_IN_BSWAP32:
10614 case BUILT_IN_BSWAP64:
10615 return fold_builtin_bswap (fndecl, arg0);
10616
10617 CASE_INT_FN (BUILT_IN_FFS):
10618 CASE_INT_FN (BUILT_IN_CLZ):
10619 CASE_INT_FN (BUILT_IN_CTZ):
10620 CASE_INT_FN (BUILT_IN_CLRSB):
10621 CASE_INT_FN (BUILT_IN_POPCOUNT):
10622 CASE_INT_FN (BUILT_IN_PARITY):
10623 return fold_builtin_bitop (fndecl, arg0);
10624
10625 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10626 return fold_builtin_signbit (loc, arg0, type);
10627
10628 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10629 return fold_builtin_significand (loc, arg0, type);
10630
10631 CASE_FLT_FN (BUILT_IN_ILOGB):
10632 CASE_FLT_FN (BUILT_IN_LOGB):
10633 return fold_builtin_logb (loc, arg0, type);
10634
10635 case BUILT_IN_ISASCII:
10636 return fold_builtin_isascii (loc, arg0);
10637
10638 case BUILT_IN_TOASCII:
10639 return fold_builtin_toascii (loc, arg0);
10640
10641 case BUILT_IN_ISDIGIT:
10642 return fold_builtin_isdigit (loc, arg0);
10643
10644 CASE_FLT_FN (BUILT_IN_FINITE):
10645 case BUILT_IN_FINITED32:
10646 case BUILT_IN_FINITED64:
10647 case BUILT_IN_FINITED128:
10648 case BUILT_IN_ISFINITE:
10649 {
10650 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10651 if (ret)
10652 return ret;
10653 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10654 }
10655
10656 CASE_FLT_FN (BUILT_IN_ISINF):
10657 case BUILT_IN_ISINFD32:
10658 case BUILT_IN_ISINFD64:
10659 case BUILT_IN_ISINFD128:
10660 {
10661 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10662 if (ret)
10663 return ret;
10664 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10665 }
10666
10667 case BUILT_IN_ISNORMAL:
10668 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10669
10670 case BUILT_IN_ISINF_SIGN:
10671 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10672
10673 CASE_FLT_FN (BUILT_IN_ISNAN):
10674 case BUILT_IN_ISNAND32:
10675 case BUILT_IN_ISNAND64:
10676 case BUILT_IN_ISNAND128:
10677 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10678
10679 case BUILT_IN_PRINTF:
10680 case BUILT_IN_PRINTF_UNLOCKED:
10681 case BUILT_IN_VPRINTF:
10682 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
10683
10684 case BUILT_IN_FREE:
10685 if (integer_zerop (arg0))
10686 return build_empty_stmt (loc);
10687 break;
10688
10689 default:
10690 break;
10691 }
10692
10693 return NULL_TREE;
10694
10695 }
10696
10697 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10698 IGNORE is true if the result of the function call is ignored. This
10699 function returns NULL_TREE if no simplification was possible. */
10700
10701 static tree
10702 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
10703 {
10704 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10705 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10706
10707 switch (fcode)
10708 {
10709 CASE_FLT_FN (BUILT_IN_JN):
10710 if (validate_arg (arg0, INTEGER_TYPE)
10711 && validate_arg (arg1, REAL_TYPE))
10712 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10713 break;
10714
10715 CASE_FLT_FN (BUILT_IN_YN):
10716 if (validate_arg (arg0, INTEGER_TYPE)
10717 && validate_arg (arg1, REAL_TYPE))
10718 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10719 &dconst0, false);
10720 break;
10721
10722 CASE_FLT_FN (BUILT_IN_DREM):
10723 CASE_FLT_FN (BUILT_IN_REMAINDER):
10724 if (validate_arg (arg0, REAL_TYPE)
10725 && validate_arg (arg1, REAL_TYPE))
10726 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10727 break;
10728
10729 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10730 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10731 if (validate_arg (arg0, REAL_TYPE)
10732 && validate_arg (arg1, POINTER_TYPE))
10733 return do_mpfr_lgamma_r (arg0, arg1, type);
10734 break;
10735
10736 CASE_FLT_FN (BUILT_IN_ATAN2):
10737 if (validate_arg (arg0, REAL_TYPE)
10738 && validate_arg (arg1, REAL_TYPE))
10739 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10740 break;
10741
10742 CASE_FLT_FN (BUILT_IN_FDIM):
10743 if (validate_arg (arg0, REAL_TYPE)
10744 && validate_arg (arg1, REAL_TYPE))
10745 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10746 break;
10747
10748 CASE_FLT_FN (BUILT_IN_HYPOT):
10749 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10750
10751 CASE_FLT_FN (BUILT_IN_CPOW):
10752 if (validate_arg (arg0, COMPLEX_TYPE)
10753 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10754 && validate_arg (arg1, COMPLEX_TYPE)
10755 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10756 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10757 break;
10758
10759 CASE_FLT_FN (BUILT_IN_LDEXP):
10760 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10761 CASE_FLT_FN (BUILT_IN_SCALBN):
10762 CASE_FLT_FN (BUILT_IN_SCALBLN):
10763 return fold_builtin_load_exponent (loc, arg0, arg1,
10764 type, /*ldexp=*/false);
10765
10766 CASE_FLT_FN (BUILT_IN_FREXP):
10767 return fold_builtin_frexp (loc, arg0, arg1, type);
10768
10769 CASE_FLT_FN (BUILT_IN_MODF):
10770 return fold_builtin_modf (loc, arg0, arg1, type);
10771
10772 case BUILT_IN_BZERO:
10773 return fold_builtin_bzero (loc, arg0, arg1, ignore);
10774
10775 case BUILT_IN_FPUTS:
10776 return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);
10777
10778 case BUILT_IN_FPUTS_UNLOCKED:
10779 return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);
10780
10781 case BUILT_IN_STRSTR:
10782 return fold_builtin_strstr (loc, arg0, arg1, type);
10783
10784 case BUILT_IN_STRCAT:
10785 return fold_builtin_strcat (loc, arg0, arg1);
10786
10787 case BUILT_IN_STRSPN:
10788 return fold_builtin_strspn (loc, arg0, arg1);
10789
10790 case BUILT_IN_STRCSPN:
10791 return fold_builtin_strcspn (loc, arg0, arg1);
10792
10793 case BUILT_IN_STRCHR:
10794 case BUILT_IN_INDEX:
10795 return fold_builtin_strchr (loc, arg0, arg1, type);
10796
10797 case BUILT_IN_STRRCHR:
10798 case BUILT_IN_RINDEX:
10799 return fold_builtin_strrchr (loc, arg0, arg1, type);
10800
10801 case BUILT_IN_STRCPY:
10802 return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);
10803
10804 case BUILT_IN_STPCPY:
10805 if (ignore)
10806 {
10807 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
10808 if (!fn)
10809 break;
10810
10811 return build_call_expr_loc (loc, fn, 2, arg0, arg1);
10812 }
10813 else
10814 return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
10815 break;
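/* E.g. when the result is unused, "(void) stpcpy (d, s);" becomes
   plain "strcpy (d, s);", which is typically cheaper since strcpy
   need not compute the returned end pointer.  */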
10816
10817 case BUILT_IN_STRCMP:
10818 return fold_builtin_strcmp (loc, arg0, arg1);
10819
10820 case BUILT_IN_STRPBRK:
10821 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10822
10823 case BUILT_IN_EXPECT:
10824 return fold_builtin_expect (loc, arg0, arg1);
10825
10826 CASE_FLT_FN (BUILT_IN_POW):
10827 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10828
10829 CASE_FLT_FN (BUILT_IN_POWI):
10830 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10831
10832 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10833 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10834
10835 CASE_FLT_FN (BUILT_IN_FMIN):
10836 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10837
10838 CASE_FLT_FN (BUILT_IN_FMAX):
10839 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10840
10841 case BUILT_IN_ISGREATER:
10842 return fold_builtin_unordered_cmp (loc, fndecl,
10843 arg0, arg1, UNLE_EXPR, LE_EXPR);
10844 case BUILT_IN_ISGREATEREQUAL:
10845 return fold_builtin_unordered_cmp (loc, fndecl,
10846 arg0, arg1, UNLT_EXPR, LT_EXPR);
10847 case BUILT_IN_ISLESS:
10848 return fold_builtin_unordered_cmp (loc, fndecl,
10849 arg0, arg1, UNGE_EXPR, GE_EXPR);
10850 case BUILT_IN_ISLESSEQUAL:
10851 return fold_builtin_unordered_cmp (loc, fndecl,
10852 arg0, arg1, UNGT_EXPR, GT_EXPR);
10853 case BUILT_IN_ISLESSGREATER:
10854 return fold_builtin_unordered_cmp (loc, fndecl,
10855 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10856 case BUILT_IN_ISUNORDERED:
10857 return fold_builtin_unordered_cmp (loc, fndecl,
10858 arg0, arg1, UNORDERED_EXPR,
10859 NOP_EXPR);
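/* E.g. isgreater (x, y) becomes !(x UNLE y) when NaNs are honored
   for the operand mode, and the ordinary !(x <= y) under
   -ffinite-math-only, where the two forms are equivalent.  */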
10860
10861 /* We do the folding for va_start in the expander. */
10862 case BUILT_IN_VA_START:
10863 break;
10864
10865 case BUILT_IN_SPRINTF:
10866 return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);
10867
10868 case BUILT_IN_OBJECT_SIZE:
10869 return fold_builtin_object_size (arg0, arg1);
10870
10871 case BUILT_IN_PRINTF:
10872 case BUILT_IN_PRINTF_UNLOCKED:
10873 case BUILT_IN_VPRINTF:
10874 return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);
10875
10876 case BUILT_IN_PRINTF_CHK:
10877 case BUILT_IN_VPRINTF_CHK:
10878 if (!validate_arg (arg0, INTEGER_TYPE)
10879 || TREE_SIDE_EFFECTS (arg0))
10880 return NULL_TREE;
10881 else
10882 return fold_builtin_printf (loc, fndecl,
10883 arg1, NULL_TREE, ignore, fcode);
10884 break;
10885
10886 case BUILT_IN_FPRINTF:
10887 case BUILT_IN_FPRINTF_UNLOCKED:
10888 case BUILT_IN_VFPRINTF:
10889 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
10890 ignore, fcode);
10891
10892 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10893 return fold_builtin_atomic_always_lock_free (arg0, arg1);
10894
10895 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10896 return fold_builtin_atomic_is_lock_free (arg0, arg1);
10897
10898 default:
10899 break;
10900 }
10901 return NULL_TREE;
10902 }
10903
10904 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10905 and ARG2. IGNORE is true if the result of the function call is ignored.
10906 This function returns NULL_TREE if no simplification was possible. */
10907
10908 static tree
10909 fold_builtin_3 (location_t loc, tree fndecl,
10910 tree arg0, tree arg1, tree arg2, bool ignore)
10911 {
10912 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10913 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10914 switch (fcode)
10915 {
10916
10917 CASE_FLT_FN (BUILT_IN_SINCOS):
10918 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10919
10920 CASE_FLT_FN (BUILT_IN_FMA):
10921 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
10923
10924 CASE_FLT_FN (BUILT_IN_REMQUO):
10925 if (validate_arg (arg0, REAL_TYPE)
10926 && validate_arg (arg1, REAL_TYPE)
10927 && validate_arg (arg2, POINTER_TYPE))
10928 return do_mpfr_remquo (arg0, arg1, arg2);
10929 break;
10930
10931 case BUILT_IN_MEMSET:
10932 return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);
10933
10934 case BUILT_IN_BCOPY:
10935 return fold_builtin_memory_op (loc, arg1, arg0, arg2,
10936 void_type_node, true, /*endp=*/3);
10937
10938 case BUILT_IN_MEMCPY:
10939 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10940 type, ignore, /*endp=*/0);
10941
10942 case BUILT_IN_MEMPCPY:
10943 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10944 type, ignore, /*endp=*/1);
10945
10946 case BUILT_IN_MEMMOVE:
10947 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10948 type, ignore, /*endp=*/3);
10949
10950 case BUILT_IN_STRNCAT:
10951 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10952
10953 case BUILT_IN_STRNCPY:
10954 return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);
10955
10956 case BUILT_IN_STRNCMP:
10957 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10958
10959 case BUILT_IN_MEMCHR:
10960 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10961
10962 case BUILT_IN_BCMP:
10963 case BUILT_IN_MEMCMP:
10964 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
10965
10966 case BUILT_IN_SPRINTF:
10967 return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);
10968
10969 case BUILT_IN_SNPRINTF:
10970 return fold_builtin_snprintf (loc, arg0, arg1, arg2, NULL_TREE, ignore);
10971
10972 case BUILT_IN_STRCPY_CHK:
10973 case BUILT_IN_STPCPY_CHK:
10974 return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
10975 ignore, fcode);
10976
10977 case BUILT_IN_STRCAT_CHK:
10978 return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
10979
10980 case BUILT_IN_PRINTF_CHK:
10981 case BUILT_IN_VPRINTF_CHK:
10982 if (!validate_arg (arg0, INTEGER_TYPE)
10983 || TREE_SIDE_EFFECTS (arg0))
10984 return NULL_TREE;
10985 else
10986 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
10987 break;
10988
10989 case BUILT_IN_FPRINTF:
10990 case BUILT_IN_FPRINTF_UNLOCKED:
10991 case BUILT_IN_VFPRINTF:
10992 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
10993 ignore, fcode);
10994
10995 case BUILT_IN_FPRINTF_CHK:
10996 case BUILT_IN_VFPRINTF_CHK:
10997 if (!validate_arg (arg1, INTEGER_TYPE)
10998 || TREE_SIDE_EFFECTS (arg1))
10999 return NULL_TREE;
11000 else
11001 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
11002 ignore, fcode);
11003
11004 default:
11005 break;
11006 }
11007 return NULL_TREE;
11008 }
11009
11010 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
11011 ARG2, and ARG3. IGNORE is true if the result of the function call is
11012 ignored. This function returns NULL_TREE if no simplification was
11013 possible. */
11014
11015 static tree
11016 fold_builtin_4 (location_t loc, tree fndecl,
11017 tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
11018 {
11019 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11020
11021 switch (fcode)
11022 {
11023 case BUILT_IN_MEMCPY_CHK:
11024 case BUILT_IN_MEMPCPY_CHK:
11025 case BUILT_IN_MEMMOVE_CHK:
11026 case BUILT_IN_MEMSET_CHK:
11027 return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
11028 NULL_TREE, ignore,
11029 DECL_FUNCTION_CODE (fndecl));
11030
11031 case BUILT_IN_STRNCPY_CHK:
11032 case BUILT_IN_STPNCPY_CHK:
11033 return fold_builtin_stxncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE,
11034 ignore, fcode);
11035
11036 case BUILT_IN_STRNCAT_CHK:
11037 return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);
11038
11039 case BUILT_IN_SNPRINTF:
11040 return fold_builtin_snprintf (loc, arg0, arg1, arg2, arg3, ignore);
11041
11042 case BUILT_IN_FPRINTF_CHK:
11043 case BUILT_IN_VFPRINTF_CHK:
11044 if (!validate_arg (arg1, INTEGER_TYPE)
11045 || TREE_SIDE_EFFECTS (arg1))
11046 return NULL_TREE;
11047 else
11048 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
11049 ignore, fcode);
11050 break;
11051
11052 default:
11053 break;
11054 }
11055 return NULL_TREE;
11056 }
11057
11058 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
11059 arguments, where NARGS <= 4. IGNORE is true if the result of the
11060 function call is ignored. This function returns NULL_TREE if no
11061 simplification was possible. Note that this only folds builtins with
11062 fixed argument patterns. Foldings that do varargs-to-varargs
11063 transformations, or that match calls with more than 4 arguments,
11064 need to be handled with fold_builtin_varargs instead. */
11065
11066 #define MAX_ARGS_TO_FOLD_BUILTIN 4
11067
11068 static tree
11069 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
11070 {
11071 tree ret = NULL_TREE;
11072
11073 switch (nargs)
11074 {
11075 case 0:
11076 ret = fold_builtin_0 (loc, fndecl, ignore);
11077 break;
11078 case 1:
11079 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
11080 break;
11081 case 2:
11082 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
11083 break;
11084 case 3:
11085 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
11086 break;
11087 case 4:
11088 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
11089 ignore);
11090 break;
11091 default:
11092 break;
11093 }
11094 if (ret)
11095 {
11096 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11097 SET_EXPR_LOCATION (ret, loc);
11098 TREE_NO_WARNING (ret) = 1;
11099 return ret;
11100 }
11101 return NULL_TREE;
11102 }
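/* The NOP_EXPR wrapper with TREE_NO_WARNING set keeps the front ends
   from issuing e.g. "statement with no effect" for a call such as
   strlen ("abc") that has just been folded to the constant 3 in a
   statement context; see also fold_call_expr below.  */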
11103
11104 /* Builtins with folding operations that operate on "..." arguments
11105 need special handling; we need to store the arguments in a convenient
11106 data structure before attempting any folding. Fortunately there are
11107 only a few builtins that fall into this category. FNDECL is the
11108 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
11109 result of the function call is ignored. */
11110
11111 static tree
11112 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
11113 bool ignore ATTRIBUTE_UNUSED)
11114 {
11115 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11116 tree ret = NULL_TREE;
11117
11118 switch (fcode)
11119 {
11120 case BUILT_IN_SPRINTF_CHK:
11121 case BUILT_IN_VSPRINTF_CHK:
11122 ret = fold_builtin_sprintf_chk (loc, exp, fcode);
11123 break;
11124
11125 case BUILT_IN_SNPRINTF_CHK:
11126 case BUILT_IN_VSNPRINTF_CHK:
11127 ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
11128 break;
11129
11130 case BUILT_IN_FPCLASSIFY:
11131 ret = fold_builtin_fpclassify (loc, exp);
11132 break;
11133
11134 default:
11135 break;
11136 }
11137 if (ret)
11138 {
11139 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11140 SET_EXPR_LOCATION (ret, loc);
11141 TREE_NO_WARNING (ret) = 1;
11142 return ret;
11143 }
11144 return NULL_TREE;
11145 }
11146
11147 /* Return true if FNDECL shouldn't be folded right now.
11148 If a built-in function has an always_inline wrapper, defer
11149 folding it until after always_inline functions have been
11150 inlined; otherwise e.g. -D_FORTIFY_SOURCE checking
11151 might not be performed. */
11152
11153 bool
11154 avoid_folding_inline_builtin (tree fndecl)
11155 {
11156 return (DECL_DECLARED_INLINE_P (fndecl)
11157 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
11158 && cfun
11159 && !cfun->always_inline_functions_inlined
11160 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
11161 }
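/* The canonical example is the _FORTIFY_SOURCE wrappers, which look
   roughly like

     extern __inline __attribute__ ((always_inline)) char *
     strcpy (char *d, const char *s)
     { return __builtin___strcpy_chk (d, s, __builtin_object_size (d, 0)); }

   Folding the builtin before the wrapper is inlined would bypass the
   object-size checking the wrapper exists to perform.  */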
11162
11163 /* A wrapper function for builtin folding that prevents warnings for
11164 "statement without effect" and the like, caused by removing the
11165 call node earlier than the warning is generated. */
11166
11167 tree
11168 fold_call_expr (location_t loc, tree exp, bool ignore)
11169 {
11170 tree ret = NULL_TREE;
11171 tree fndecl = get_callee_fndecl (exp);
11172 if (fndecl
11173 && TREE_CODE (fndecl) == FUNCTION_DECL
11174 && DECL_BUILT_IN (fndecl)
11175 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
11176 yet. Defer folding until we see all the arguments
11177 (after inlining). */
11178 && !CALL_EXPR_VA_ARG_PACK (exp))
11179 {
11180 int nargs = call_expr_nargs (exp);
11181
11182 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
11183 instead the last argument is __builtin_va_arg_pack (). Defer folding
11184 even in that case, until arguments are finalized. */
11185 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
11186 {
11187 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
11188 if (fndecl2
11189 && TREE_CODE (fndecl2) == FUNCTION_DECL
11190 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
11191 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
11192 return NULL_TREE;
11193 }
11194
11195 if (avoid_folding_inline_builtin (fndecl))
11196 return NULL_TREE;
11197
11198 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11199 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
11200 CALL_EXPR_ARGP (exp), ignore);
11201 else
11202 {
11203 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
11204 {
11205 tree *args = CALL_EXPR_ARGP (exp);
11206 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
11207 }
11208 if (!ret)
11209 ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
11210 if (ret)
11211 return ret;
11212 }
11213 }
11214 return NULL_TREE;
11215 }
11216
11217 /* Conveniently construct a function call expression. FNDECL names the
11218 function to be called and N arguments are passed in the array
11219 ARGARRAY. */
11220
11221 tree
11222 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
11223 {
11224 tree fntype = TREE_TYPE (fndecl);
11225 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11226
11227 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
11228 }
11229
11230 /* Conveniently construct a function call expression. FNDECL names the
11231 function to be called and the arguments are passed in the vector
11232 VEC. */
11233
11234 tree
11235 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
11236 {
11237 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
11238 vec_safe_address (vec));
11239 }
11240
11241
11242 /* Conveniently construct a function call expression. FNDECL names the
11243 function to be called, N is the number of arguments, and the "..."
11244 parameters are the argument expressions. */
11245
11246 tree
11247 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
11248 {
11249 va_list ap;
11250 tree *argarray = XALLOCAVEC (tree, n);
11251 int i;
11252
11253 va_start (ap, n);
11254 for (i = 0; i < n; i++)
11255 argarray[i] = va_arg (ap, tree);
11256 va_end (ap);
11257 return build_call_expr_loc_array (loc, fndecl, n, argarray);
11258 }
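/* E.g. build_call_expr_loc (loc, strlen_fn, 1, dst) builds the tree
   for "strlen (dst)" carrying location LOC; fold_builtin_strcat
   below uses exactly this pattern.  */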
11259
11260 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
11261 varargs macros aren't supported by all bootstrap compilers. */
11262
11263 tree
11264 build_call_expr (tree fndecl, int n, ...)
11265 {
11266 va_list ap;
11267 tree *argarray = XALLOCAVEC (tree, n);
11268 int i;
11269
11270 va_start (ap, n);
11271 for (i = 0; i < n; i++)
11272 argarray[i] = va_arg (ap, tree);
11273 va_end (ap);
11274 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
11275 }
11276
11277 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
11278 N arguments are passed in the array ARGARRAY. */
11279
11280 tree
11281 fold_builtin_call_array (location_t loc, tree type,
11282 tree fn,
11283 int n,
11284 tree *argarray)
11285 {
11286 tree ret = NULL_TREE;
11287 tree exp;
11288
11289 if (TREE_CODE (fn) == ADDR_EXPR)
11290 {
11291 tree fndecl = TREE_OPERAND (fn, 0);
11292 if (TREE_CODE (fndecl) == FUNCTION_DECL
11293 && DECL_BUILT_IN (fndecl))
11294 {
11295 /* If last argument is __builtin_va_arg_pack (), arguments to this
11296 function are not finalized yet. Defer folding until they are. */
11297 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
11298 {
11299 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
11300 if (fndecl2
11301 && TREE_CODE (fndecl2) == FUNCTION_DECL
11302 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
11303 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
11304 return build_call_array_loc (loc, type, fn, n, argarray);
11305 }
11306 if (avoid_folding_inline_builtin (fndecl))
11307 return build_call_array_loc (loc, type, fn, n, argarray);
11308 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11309 {
11310 ret = targetm.fold_builtin (fndecl, n, argarray, false);
11311 if (ret)
11312 return ret;
11313
11314 return build_call_array_loc (loc, type, fn, n, argarray);
11315 }
11316 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
11317 {
11318 /* First try the transformations that don't require consing up
11319 an exp. */
11320 ret = fold_builtin_n (loc, fndecl, argarray, n, false);
11321 if (ret)
11322 return ret;
11323 }
11324
11325 /* If we got this far, we need to build an exp. */
11326 exp = build_call_array_loc (loc, type, fn, n, argarray);
11327 ret = fold_builtin_varargs (loc, fndecl, exp, false);
11328 return ret ? ret : exp;
11329 }
11330 }
11331
11332 return build_call_array_loc (loc, type, fn, n, argarray);
11333 }
11334
11335 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
11336 list ARGS along with N new arguments in NEWARGS. SKIP is the number
11337 of arguments in ARGS to be omitted. OLDNARGS is the number of
11338 elements in ARGS. */
11339
11340 static tree
11341 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
11342 int skip, tree fndecl, int n, va_list newargs)
11343 {
11344 int nargs = oldnargs - skip + n;
11345 tree *buffer;
11346
11347 if (n > 0)
11348 {
11349 int i, j;
11350
11351 buffer = XALLOCAVEC (tree, nargs);
11352 for (i = 0; i < n; i++)
11353 buffer[i] = va_arg (newargs, tree);
11354 for (j = skip; j < oldnargs; j++, i++)
11355 buffer[i] = args[j];
11356 }
11357 else
11358 buffer = args + skip;
11359
11360 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
11361 }
11362
11363 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
11364 list ARGS along with N new arguments specified as the "..."
11365 parameters. SKIP is the number of arguments in ARGS to be omitted.
11366 OLDNARGS is the number of elements in ARGS. */
11367
11368 static tree
11369 rewrite_call_expr_array (location_t loc, int oldnargs, tree *args,
11370 int skip, tree fndecl, int n, ...)
11371 {
11372 va_list ap;
11373 tree t;
11374
11375 va_start (ap, n);
11376 t = rewrite_call_expr_valist (loc, oldnargs, args, skip, fndecl, n, ap);
11377 va_end (ap);
11378
11379 return t;
11380 }
11381
11382 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
11383 along with N new arguments specified as the "..." parameters. SKIP
11384 is the number of arguments in EXP to be omitted. This function is used
11385 to do varargs-to-varargs transformations. */
11386
11387 static tree
11388 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
11389 {
11390 va_list ap;
11391 tree t;
11392
11393 va_start (ap, n);
11394 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
11395 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
11396 va_end (ap);
11397
11398 return t;
11399 }
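/* E.g. with EXP = f (a, b, c), rewrite_call_expr (loc, exp, 2,
   g_decl, 1, x) builds "g (x, c)": the first SKIP = 2 arguments of
   EXP are dropped, the N = 1 new argument X is prepended, and the
   remaining tail C is kept.  (g_decl and x are illustrative names.)  */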
11400
11401 /* Validate a single argument ARG against a tree code CODE representing
11402 a type. */
11403
11404 static bool
11405 validate_arg (const_tree arg, enum tree_code code)
11406 {
11407 if (!arg)
11408 return false;
11409 else if (code == POINTER_TYPE)
11410 return POINTER_TYPE_P (TREE_TYPE (arg));
11411 else if (code == INTEGER_TYPE)
11412 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
11413 return code == TREE_CODE (TREE_TYPE (arg));
11414 }
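/* Note the deliberate looseness: any pointer type satisfies
   POINTER_TYPE and any integral type (including booleans and enums)
   satisfies INTEGER_TYPE, so e.g. validate_arg (arg, INTEGER_TYPE)
   accepts a _Bool argument.  */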
11415
11416 /* This function validates the types of a function call argument list
11417 against a specified list of tree_codes. If the last specifier is a 0,
11418 that represents an ellipsis, otherwise the last specifier must be a
11419 VOID_TYPE.
11420
11421 This is the GIMPLE version of validate_arglist. Eventually we want to
11422 completely convert builtins.c to work from GIMPLEs and the tree based
11423 validate_arglist will then be removed. */
11424
11425 bool
11426 validate_gimple_arglist (const_gimple call, ...)
11427 {
11428 enum tree_code code;
11429 bool res = false;
11430 va_list ap;
11431 const_tree arg;
11432 size_t i;
11433
11434 va_start (ap, call);
11435 i = 0;
11436
11437 do
11438 {
11439 code = (enum tree_code) va_arg (ap, int);
11440 switch (code)
11441 {
11442 case 0:
11443 /* This signifies an ellipsis; any further arguments are all OK. */
11444 res = true;
11445 goto end;
11446 case VOID_TYPE:
11447 /* This signifies an endlink; if no arguments remain, return
11448 true, otherwise return false. */
11449 res = (i == gimple_call_num_args (call));
11450 goto end;
11451 default:
11452 /* If no parameters remain or the parameter's code does not
11453 match the specified code, return false. Otherwise continue
11454 checking any remaining arguments. */
11455 arg = gimple_call_arg (call, i++);
11456 if (!validate_arg (arg, code))
11457 goto end;
11458 break;
11459 }
11460 }
11461 while (1);
11462
11463 /* We need gotos here since va_end must be called exactly once,
11464 on the single exit path of the function. */
11465 end: ;
11466 va_end (ap);
11467
11468 return res;
11469 }
11470
11471 /* This function validates the types of a function call argument list
11472 against a specified list of tree_codes. If the last specifier is a 0,
11473 that represents an ellipsis, otherwise the last specifier must be a
11474 VOID_TYPE. */
11475
11476 bool
11477 validate_arglist (const_tree callexpr, ...)
11478 {
11479 enum tree_code code;
11480 bool res = false;
11481 va_list ap;
11482 const_call_expr_arg_iterator iter;
11483 const_tree arg;
11484
11485 va_start (ap, callexpr);
11486 init_const_call_expr_arg_iterator (callexpr, &iter);
11487
11488 do
11489 {
11490 code = (enum tree_code) va_arg (ap, int);
11491 switch (code)
11492 {
11493 case 0:
11494 /* This signifies an ellipsis; any further arguments are all OK. */
11495 res = true;
11496 goto end;
11497 case VOID_TYPE:
11498 /* This signifies an endlink; if no arguments remain, return
11499 true, otherwise return false. */
11500 res = !more_const_call_expr_args_p (&iter);
11501 goto end;
11502 default:
11503 /* If no parameters remain or the parameter's code does not
11504 match the specified code, return false. Otherwise continue
11505 checking any remaining arguments. */
11506 arg = next_const_call_expr_arg (&iter);
11507 if (!validate_arg (arg, code))
11508 goto end;
11509 break;
11510 }
11511 }
11512 while (1);
11513
11514 /* We need gotos here since va_end must be called exactly once,
11515 on the single exit path of the function. */
11516 end: ;
11517 va_end (ap);
11518
11519 return res;
11520 }
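/* Typical use: validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
   VOID_TYPE) checks a two-pointer call such as strstr, while a
   trailing 0, as in validate_arglist (exp, POINTER_TYPE, 0), also
   accepts any further arguments after the first.  */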
11521
11522 /* Default target-specific builtin expander that does nothing. */
11523
11524 rtx
11525 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
11526 rtx target ATTRIBUTE_UNUSED,
11527 rtx subtarget ATTRIBUTE_UNUSED,
11528 enum machine_mode mode ATTRIBUTE_UNUSED,
11529 int ignore ATTRIBUTE_UNUSED)
11530 {
11531 return NULL_RTX;
11532 }
11533
11534 /* Returns true if EXP represents data that would potentially reside
11535 in a readonly section. */
11536
11537 static bool
11538 readonly_data_expr (tree exp)
11539 {
11540 STRIP_NOPS (exp);
11541
11542 if (TREE_CODE (exp) != ADDR_EXPR)
11543 return false;
11544
11545 exp = get_base_address (TREE_OPERAND (exp, 0));
11546 if (!exp)
11547 return false;
11548
11549 /* Make sure we call decl_readonly_section only for trees it
11550 can handle (since it returns true for everything it doesn't
11551 understand). */
11552 if (TREE_CODE (exp) == STRING_CST
11553 || TREE_CODE (exp) == CONSTRUCTOR
11554 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
11555 return decl_readonly_section (exp, 0);
11556 else
11557 return false;
11558 }
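/* E.g. the address of a string literal or of a TREE_STATIC variable
   placed in a read-only section qualifies; fold_builtin_memory_op
   uses this to fold memmove to memcpy, since read-only data cannot
   overlap a writable destination.  */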
11559
11560 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
11561 to the call, and TYPE is its return type.
11562
11563 Return NULL_TREE if no simplification was possible, otherwise return the
11564 simplified form of the call as a tree.
11565
11566 The simplified form may be a constant or other expression which
11567 computes the same value, but in a more efficient manner (including
11568 calls to other builtin functions).
11569
11570 The call may contain arguments which need to be evaluated, but
11571 which are not useful to determine the result of the call. In
11572 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11573 COMPOUND_EXPR will be an argument which must be evaluated.
11574 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11575 COMPOUND_EXPR in the chain will contain the tree for the simplified
11576 form of the builtin function call. */
11577
11578 static tree
11579 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
11580 {
11581 if (!validate_arg (s1, POINTER_TYPE)
11582 || !validate_arg (s2, POINTER_TYPE))
11583 return NULL_TREE;
11584 else
11585 {
11586 tree fn;
11587 const char *p1, *p2;
11588
11589 p2 = c_getstr (s2);
11590 if (p2 == NULL)
11591 return NULL_TREE;
11592
11593 p1 = c_getstr (s1);
11594 if (p1 != NULL)
11595 {
11596 const char *r = strstr (p1, p2);
11597 tree tem;
11598
11599 if (r == NULL)
11600 return build_int_cst (TREE_TYPE (s1), 0);
11601
11602 /* Return an offset into the constant string argument. */
11603 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11604 return fold_convert_loc (loc, type, tem);
11605 }
11606
11607 /* The argument is const char *, and the result is char *, so we need
11608 a type conversion here to avoid a warning. */
11609 if (p2[0] == '\0')
11610 return fold_convert_loc (loc, type, s1);
11611
11612 if (p2[1] != '\0')
11613 return NULL_TREE;
11614
11615 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11616 if (!fn)
11617 return NULL_TREE;
11618
11619 /* New argument list transforming strstr(s1, s2) to
11620 strchr(s1, s2[0]). */
11621 return build_call_expr_loc (loc, fn, 2, s1,
11622 build_int_cst (integer_type_node, p2[0]));
11623 }
11624 }
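/* Summarizing the cases above:

     strstr ("hello", "lo")  ->  "hello" + 3
     strstr (s, "")          ->  (char *) s
     strstr (s, "l")         ->  strchr (s, 'l')

   anything else is left as a real strstr call.  */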
11625
11626 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11627 the call, and TYPE is its return type.
11628
11629 Return NULL_TREE if no simplification was possible, otherwise return the
11630 simplified form of the call as a tree.
11631
11632 The simplified form may be a constant or other expression which
11633 computes the same value, but in a more efficient manner (including
11634 calls to other builtin functions).
11635
11636 The call may contain arguments which need to be evaluated, but
11637 which are not useful to determine the result of the call. In
11638 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11639 COMPOUND_EXPR will be an argument which must be evaluated.
11640 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11641 COMPOUND_EXPR in the chain will contain the tree for the simplified
11642 form of the builtin function call. */
11643
11644 static tree
11645 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
11646 {
11647 if (!validate_arg (s1, POINTER_TYPE)
11648 || !validate_arg (s2, INTEGER_TYPE))
11649 return NULL_TREE;
11650 else
11651 {
11652 const char *p1;
11653
11654 if (TREE_CODE (s2) != INTEGER_CST)
11655 return NULL_TREE;
11656
11657 p1 = c_getstr (s1);
11658 if (p1 != NULL)
11659 {
11660 char c;
11661 const char *r;
11662 tree tem;
11663
11664 if (target_char_cast (s2, &c))
11665 return NULL_TREE;
11666
11667 r = strchr (p1, c);
11668
11669 if (r == NULL)
11670 return build_int_cst (TREE_TYPE (s1), 0);
11671
11672 /* Return an offset into the constant string argument. */
11673 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11674 return fold_convert_loc (loc, type, tem);
11675 }
11676 return NULL_TREE;
11677 }
11678 }
11679
11680 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11681 the call, and TYPE is its return type.
11682
11683 Return NULL_TREE if no simplification was possible, otherwise return the
11684 simplified form of the call as a tree.
11685
11686 The simplified form may be a constant or other expression which
11687 computes the same value, but in a more efficient manner (including
11688 calls to other builtin functions).
11689
11690 The call may contain arguments which need to be evaluated, but
11691 which are not useful to determine the result of the call. In
11692 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11693 COMPOUND_EXPR will be an argument which must be evaluated.
11694 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11695 COMPOUND_EXPR in the chain will contain the tree for the simplified
11696 form of the builtin function call. */
11697
11698 static tree
11699 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
11700 {
11701 if (!validate_arg (s1, POINTER_TYPE)
11702 || !validate_arg (s2, INTEGER_TYPE))
11703 return NULL_TREE;
11704 else
11705 {
11706 tree fn;
11707 const char *p1;
11708
11709 if (TREE_CODE (s2) != INTEGER_CST)
11710 return NULL_TREE;
11711
11712 p1 = c_getstr (s1);
11713 if (p1 != NULL)
11714 {
11715 char c;
11716 const char *r;
11717 tree tem;
11718
11719 if (target_char_cast (s2, &c))
11720 return NULL_TREE;
11721
11722 r = strrchr (p1, c);
11723
11724 if (r == NULL)
11725 return build_int_cst (TREE_TYPE (s1), 0);
11726
11727 /* Return an offset into the constant string argument. */
11728 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11729 return fold_convert_loc (loc, type, tem);
11730 }
11731
11732 if (! integer_zerop (s2))
11733 return NULL_TREE;
11734
11735 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11736 if (!fn)
11737 return NULL_TREE;
11738
11739 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11740 return build_call_expr_loc (loc, fn, 2, s1, s2);
11741 }
11742 }
11743
11744 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11745 to the call, and TYPE is its return type.
11746
11747 Return NULL_TREE if no simplification was possible, otherwise return the
11748 simplified form of the call as a tree.
11749
11750 The simplified form may be a constant or other expression which
11751 computes the same value, but in a more efficient manner (including
11752 calls to other builtin functions).
11753
11754 The call may contain arguments which need to be evaluated, but
11755 which are not useful to determine the result of the call. In
11756 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11757 COMPOUND_EXPR will be an argument which must be evaluated.
11758 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11759 COMPOUND_EXPR in the chain will contain the tree for the simplified
11760 form of the builtin function call. */
11761
11762 static tree
11763 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
11764 {
11765 if (!validate_arg (s1, POINTER_TYPE)
11766 || !validate_arg (s2, POINTER_TYPE))
11767 return NULL_TREE;
11768 else
11769 {
11770 tree fn;
11771 const char *p1, *p2;
11772
11773 p2 = c_getstr (s2);
11774 if (p2 == NULL)
11775 return NULL_TREE;
11776
11777 p1 = c_getstr (s1);
11778 if (p1 != NULL)
11779 {
11780 const char *r = strpbrk (p1, p2);
11781 tree tem;
11782
11783 if (r == NULL)
11784 return build_int_cst (TREE_TYPE (s1), 0);
11785
11786 /* Return an offset into the constant string argument. */
11787 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11788 return fold_convert_loc (loc, type, tem);
11789 }
11790
11791 if (p2[0] == '\0')
11792 /* strpbrk(x, "") == NULL.
11793 Evaluate and ignore s1 in case it had side-effects. */
11794 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11795
11796 if (p2[1] != '\0')
11797 return NULL_TREE; /* Really call strpbrk. */
11798
11799 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11800 if (!fn)
11801 return NULL_TREE;
11802
11803 /* New argument list transforming strpbrk(s1, s2) to
11804 strchr(s1, s2[0]). */
11805 return build_call_expr_loc (loc, fn, 2, s1,
11806 build_int_cst (integer_type_node, p2[0]));
11807 }
11808 }
11809
11810 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11811 to the call.
11812
11813 Return NULL_TREE if no simplification was possible, otherwise return the
11814 simplified form of the call as a tree.
11815
11816 The simplified form may be a constant or other expression which
11817 computes the same value, but in a more efficient manner (including
11818 calls to other builtin functions).
11819
11820 The call may contain arguments which need to be evaluated, but
11821 which are not useful to determine the result of the call. In
11822 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11823 COMPOUND_EXPR will be an argument which must be evaluated.
11824 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11825 COMPOUND_EXPR in the chain will contain the tree for the simplified
11826 form of the builtin function call. */
11827
11828 static tree
11829 fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src)
11830 {
11831 if (!validate_arg (dst, POINTER_TYPE)
11832 || !validate_arg (src, POINTER_TYPE))
11833 return NULL_TREE;
11834 else
11835 {
11836 const char *p = c_getstr (src);
11837
11838 /* If the string length is zero, return the dst parameter. */
11839 if (p && *p == '\0')
11840 return dst;
11841
11842 if (optimize_insn_for_speed_p ())
11843 {
11844 /* See if we can store by pieces into (dst + strlen(dst)). */
11845 tree newdst, call;
11846 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11847 tree strcpy_fn = builtin_decl_implicit (BUILT_IN_STRCPY);
11848
11849 if (!strlen_fn || !strcpy_fn)
11850 return NULL_TREE;
11851
11852 /* If the target has no movstr pattern we don't want to emit a
11853 strcpy call unless the length of the source string is
11854 computable; in that case the call can be turned into memcpy,
11855 probably later expanding to a sequence of mov instructions.
11856 If we have movstr instructions we can emit strcpy calls. */
11857 if (!HAVE_movstr)
11858 {
11859 tree len = c_strlen (src, 1);
11860 if (! len || TREE_SIDE_EFFECTS (len))
11861 return NULL_TREE;
11862 }
11863
11864 /* Stabilize the argument list. */
11865 dst = builtin_save_expr (dst);
11866
11867 /* Create strlen (dst). */
11868 newdst = build_call_expr_loc (loc, strlen_fn, 1, dst);
11869 /* Create (dst p+ strlen (dst)). */
11870
11871 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
11872 newdst = builtin_save_expr (newdst);
11873
11874 call = build_call_expr_loc (loc, strcpy_fn, 2, newdst, src);
11875 return build2 (COMPOUND_EXPR, TREE_TYPE (dst), call, dst);
11876 }
11877 return NULL_TREE;
11878 }
11879 }
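/* Apart from the trivial strcat (d, "") -> d case, the fold above
   produces (roughly, with D stabilized by save_exprs)

     (strcpy (d + strlen (d), s), d)

   i.e. a COMPOUND_EXPR whose value is the original destination.  */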
11880
11881 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11882 arguments to the call.
11883
11884 Return NULL_TREE if no simplification was possible, otherwise return the
11885 simplified form of the call as a tree.
11886
11887 The simplified form may be a constant or other expression which
11888 computes the same value, but in a more efficient manner (including
11889 calls to other builtin functions).
11890
11891 The call may contain arguments which need to be evaluated, but
11892 which are not useful to determine the result of the call. In
11893 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11894 COMPOUND_EXPR will be an argument which must be evaluated.
11895 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11896 COMPOUND_EXPR in the chain will contain the tree for the simplified
11897 form of the builtin function call. */
11898
11899 static tree
11900 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
11901 {
11902 if (!validate_arg (dst, POINTER_TYPE)
11903 || !validate_arg (src, POINTER_TYPE)
11904 || !validate_arg (len, INTEGER_TYPE))
11905 return NULL_TREE;
11906 else
11907 {
11908 const char *p = c_getstr (src);
11909
11910 /* If the requested length is zero, or the src parameter string
11911 length is zero, return the dst parameter. */
11912 if (integer_zerop (len) || (p && *p == '\0'))
11913 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11914
11915 /* If the requested len is greater than or equal to the string
11916 length, call strcat. */
11917 if (TREE_CODE (len) == INTEGER_CST && p
11918 && compare_tree_int (len, strlen (p)) >= 0)
11919 {
11920 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
11921
11922 /* If the replacement _DECL isn't initialized, don't do the
11923 transformation. */
11924 if (!fn)
11925 return NULL_TREE;
11926
11927 return build_call_expr_loc (loc, fn, 2, dst, src);
11928 }
11929 return NULL_TREE;
11930 }
11931 }
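/* E.g. strncat (d, "abc", 5) becomes strcat (d, "abc"): a bound of 5
   can never truncate a 3-character source, so the length check is
   folded away entirely.  */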
11932
11933 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11934 to the call.
11935
11936 Return NULL_TREE if no simplification was possible, otherwise return the
11937 simplified form of the call as a tree.
11938
11939 The simplified form may be a constant or other expression which
11940 computes the same value, but in a more efficient manner (including
11941 calls to other builtin functions).
11942
11943 The call may contain arguments which need to be evaluated, but
11944 which are not useful to determine the result of the call. In
11945 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11946 COMPOUND_EXPR will be an argument which must be evaluated.
11947 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11948 COMPOUND_EXPR in the chain will contain the tree for the simplified
11949 form of the builtin function call. */
11950
11951 static tree
11952 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11953 {
11954 if (!validate_arg (s1, POINTER_TYPE)
11955 || !validate_arg (s2, POINTER_TYPE))
11956 return NULL_TREE;
11957 else
11958 {
11959 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11960
11961 /* If both arguments are constants, evaluate at compile-time. */
11962 if (p1 && p2)
11963 {
11964 const size_t r = strspn (p1, p2);
11965 return build_int_cst (size_type_node, r);
11966 }
11967
11968 /* If either argument is "", return NULL_TREE. */
11969 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11970 /* Evaluate and ignore both arguments in case either one has
11971 side-effects. */
11972 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11973 s1, s2);
11974 return NULL_TREE;
11975 }
11976 }
11977
11978 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11979 to the call.
11980
11981 Return NULL_TREE if no simplification was possible, otherwise return the
11982 simplified form of the call as a tree.
11983
11984 The simplified form may be a constant or other expression which
11985 computes the same value, but in a more efficient manner (including
11986 calls to other builtin functions).
11987
11988 The call may contain arguments which need to be evaluated, but
11989 which are not useful to determine the result of the call. In
11990 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11991 COMPOUND_EXPR will be an argument which must be evaluated.
11992 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11993 COMPOUND_EXPR in the chain will contain the tree for the simplified
11994 form of the builtin function call. */
11995
11996 static tree
11997 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11998 {
11999 if (!validate_arg (s1, POINTER_TYPE)
12000 || !validate_arg (s2, POINTER_TYPE))
12001 return NULL_TREE;
12002 else
12003 {
12004 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
12005
12006 /* If both arguments are constants, evaluate at compile-time. */
12007 if (p1 && p2)
12008 {
12009 const size_t r = strcspn (p1, p2);
12010 return build_int_cst (size_type_node, r);
12011 }
12012
12013 /* If the first argument is "", return NULL_TREE. */
12014 if (p1 && *p1 == '\0')
12015 {
12016 /* Evaluate and ignore argument s2 in case it has
12017 side-effects. */
12018 return omit_one_operand_loc (loc, size_type_node,
12019 size_zero_node, s2);
12020 }
12021
12022 /* If the second argument is "", return __builtin_strlen (s1). */
12023 if (p2 && *p2 == '\0')
12024 {
12025 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
12026
12027 /* If the replacement _DECL isn't initialized, don't do the
12028 transformation. */
12029 if (!fn)
12030 return NULL_TREE;
12031
12032 return build_call_expr_loc (loc, fn, 1, s1);
12033 }
12034 return NULL_TREE;
12035 }
12036 }
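/* E.g. strcspn ("abc", "c") folds to the constant 2, and
   strcspn (s, "") becomes strlen (s), since no character of S can
   ever match an empty reject set.  */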
12037
12038 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
12039 to the call. IGNORE is true if the value returned
12040 by the builtin will be ignored. UNLOCKED is true if this is
12041 actually a call to fputs_unlocked. If LEN is non-NULL, it represents
12042 the known length of the string. Return NULL_TREE if no simplification
12043 was possible. */
12044
12045 tree
12046 fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
12047 bool ignore, bool unlocked, tree len)
12048 {
12049 /* If we're using an unlocked function, assume the other unlocked
12050 functions exist explicitly. */
12051 tree const fn_fputc = (unlocked
12052 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
12053 : builtin_decl_implicit (BUILT_IN_FPUTC));
12054 tree const fn_fwrite = (unlocked
12055 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
12056 : builtin_decl_implicit (BUILT_IN_FWRITE));
12057
12058 /* If the return value is used, don't do the transformation. */
12059 if (!ignore)
12060 return NULL_TREE;
12061
12062 /* Verify the arguments in the original call. */
12063 if (!validate_arg (arg0, POINTER_TYPE)
12064 || !validate_arg (arg1, POINTER_TYPE))
12065 return NULL_TREE;
12066
12067 if (! len)
12068 len = c_strlen (arg0, 0);
12069
12070 /* Get the length of the string passed to fputs. If the length
12071 can't be determined, punt. */
12072 if (!len
12073 || TREE_CODE (len) != INTEGER_CST)
12074 return NULL_TREE;
12075
12076 switch (compare_tree_int (len, 1))
12077 {
12078 case -1: /* length is 0, delete the call entirely. */
12079 return omit_one_operand_loc (loc, integer_type_node,
12080 integer_zero_node, arg1);
12081
12082 case 0: /* length is 1, call fputc. */
12083 {
12084 const char *p = c_getstr (arg0);
12085
12086 if (p != NULL)
12087 {
12088 if (fn_fputc)
12089 return build_call_expr_loc (loc, fn_fputc, 2,
12090 build_int_cst
12091 (integer_type_node, p[0]), arg1);
12092 else
12093 return NULL_TREE;
12094 }
12095 }
12096 /* FALLTHROUGH */
12097 case 1: /* length is greater than 1, call fwrite. */
12098 {
12099 /* If optimizing for size, keep fputs. */
12100 if (optimize_function_for_size_p (cfun))
12101 return NULL_TREE;
12102 /* New argument list transforming fputs(string, stream) to
12103 fwrite(string, 1, len, stream). */
12104 if (fn_fwrite)
12105 return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
12106 size_one_node, len, arg1);
12107 else
12108 return NULL_TREE;
12109 }
12110 default:
12111 gcc_unreachable ();
12112 }
12113 return NULL_TREE;
12114 }
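/* E.g. fputs ("x", f) becomes fputc ('x', f); fputs ("hello", f)
   becomes fwrite ("hello", 1, 5, f) unless optimizing for size; and
   fputs ("", f) deletes the call, keeping only the evaluation of F.  */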
12115
12116 /* Fold the next_arg or va_start call EXP. Returns true if an error
12117 was produced, false otherwise. This is done so that we don't output the
12118 error or warning twice or three times. */
12119
12120 bool
12121 fold_builtin_next_arg (tree exp, bool va_start_p)
12122 {
12123 tree fntype = TREE_TYPE (current_function_decl);
12124 int nargs = call_expr_nargs (exp);
12125 tree arg;
12126 /* There is a good chance the current input_location points inside the
12127 definition of the va_start macro (perhaps on the token for
12128 builtin) in a system header, so warnings will not be emitted.
12129 Use the location in real source code. */
12130 source_location current_location =
12131 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
12132 NULL);
12133
12134 if (!stdarg_p (fntype))
12135 {
12136 error ("%<va_start%> used in function with fixed args");
12137 return true;
12138 }
12139
12140 if (va_start_p)
12141 {
12142       if (nargs != 2)
12143 {
12144 error ("wrong number of arguments to function %<va_start%>");
12145 return true;
12146 }
12147 arg = CALL_EXPR_ARG (exp, 1);
12148 }
12149   /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
12150      once we have checked the arguments and, if needed, issued a warning.  */
12151 else
12152 {
12153 if (nargs == 0)
12154 {
12155 /* Evidently an out of date version of <stdarg.h>; can't validate
12156 va_start's second argument, but can still work as intended. */
12157 warning_at (current_location,
12158 OPT_Wvarargs,
12159 "%<__builtin_next_arg%> called without an argument");
12160 return true;
12161 }
12162 else if (nargs > 1)
12163 {
12164 error ("wrong number of arguments to function %<__builtin_next_arg%>");
12165 return true;
12166 }
12167 arg = CALL_EXPR_ARG (exp, 0);
12168 }
12169
12170 if (TREE_CODE (arg) == SSA_NAME)
12171 arg = SSA_NAME_VAR (arg);
12172
12173 /* We destructively modify the call to be __builtin_va_start (ap, 0)
12174 or __builtin_next_arg (0) the first time we see it, after checking
12175 the arguments and if needed issuing a warning. */
12176 if (!integer_zerop (arg))
12177 {
12178 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
12179
12180 /* Strip off all nops for the sake of the comparison. This
12181 is not quite the same as STRIP_NOPS. It does more.
12182 We must also strip off INDIRECT_EXPR for C++ reference
12183 parameters. */
12184 while (CONVERT_EXPR_P (arg)
12185 || TREE_CODE (arg) == INDIRECT_REF)
12186 arg = TREE_OPERAND (arg, 0);
12187 if (arg != last_parm)
12188 {
12189 	  /* FIXME: Sometimes with the tree optimizers we can end up with
12190 	     something other than the last argument even though the user
12191 	     used the last argument.  We just warn and treat the arg as if
12192 	     it were the last argument so that we will not get wrong code
12193 	     because of it.  */
12194 warning_at (current_location,
12195 OPT_Wvarargs,
12196 "second parameter of %<va_start%> not last named argument");
12197 }
12198
12199 /* Undefined by C99 7.15.1.4p4 (va_start):
12200 "If the parameter parmN is declared with the register storage
12201 class, with a function or array type, or with a type that is
12202 not compatible with the type that results after application of
12203 the default argument promotions, the behavior is undefined."
12204 */
12205 else if (DECL_REGISTER (arg))
12206 {
12207 warning_at (current_location,
12208 OPT_Wvarargs,
12209 "undefined behaviour when second parameter of "
12210 "%<va_start%> is declared with %<register%> storage");
12211 }
12212
12213 /* We want to verify the second parameter just once before the tree
12214 optimizers are run and then avoid keeping it in the tree,
12215 as otherwise we could warn even for correct code like:
12216 void foo (int i, ...)
12217 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
12218 if (va_start_p)
12219 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
12220 else
12221 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
12222 }
12223 return false;
12224 }
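
/* For illustration, the checks above applied to hypothetical user code
   (assuming <stdarg.h> is included):

     void ok (int last, ...)
     {
       va_list ap;
       va_start (ap, last);  // accepted; the arg is later rewritten to 0
       va_end (ap);
     }

     void bad (int a, int b, ...)
     {
       va_list ap;
       va_start (ap, a);     // -Wvarargs: second parameter of va_start
       va_end (ap);          // is not the last named argument
     }  */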
12225
12226
12227 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
12228 ORIG may be null if this is a 2-argument call. We don't attempt to
12229 simplify calls with more than 3 arguments.
12230
12231 Return NULL_TREE if no simplification was possible, otherwise return the
12232 simplified form of the call as a tree. If IGNORED is true, it means that
12233 the caller does not use the returned value of the function. */
12234
12235 static tree
12236 fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
12237 tree orig, int ignored)
12238 {
12239 tree call, retval;
12240 const char *fmt_str = NULL;
12241
12242 /* Verify the required arguments in the original call. We deal with two
12243    types of sprintf() calls: 'sprintf (dest, fmt)' and
12244 'sprintf (dest, "%s", orig)'. */
12245 if (!validate_arg (dest, POINTER_TYPE)
12246 || !validate_arg (fmt, POINTER_TYPE))
12247 return NULL_TREE;
12248 if (orig && !validate_arg (orig, POINTER_TYPE))
12249 return NULL_TREE;
12250
12251 /* Check whether the format is a literal string constant. */
12252 fmt_str = c_getstr (fmt);
12253 if (fmt_str == NULL)
12254 return NULL_TREE;
12255
12256 call = NULL_TREE;
12257 retval = NULL_TREE;
12258
12259 if (!init_target_chars ())
12260 return NULL_TREE;
12261
12262 /* If the format doesn't contain % args or %%, use strcpy. */
12263 if (strchr (fmt_str, target_percent) == NULL)
12264 {
12265 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12266
12267 if (!fn)
12268 return NULL_TREE;
12269
12270 /* Don't optimize sprintf (buf, "abc", ptr++). */
12271 if (orig)
12272 return NULL_TREE;
12273
12274 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
12275 'format' is known to contain no % formats. */
12276 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
12277 if (!ignored)
12278 retval = build_int_cst (integer_type_node, strlen (fmt_str));
12279 }
12280
12281 /* If the format is "%s", use strcpy if the result isn't used. */
12282 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
12283 {
12284 tree fn;
12285 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12286
12287 if (!fn)
12288 return NULL_TREE;
12289
12290 /* Don't crash on sprintf (str1, "%s"). */
12291 if (!orig)
12292 return NULL_TREE;
12293
12294 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
12295 if (!ignored)
12296 {
12297 retval = c_strlen (orig, 1);
12298 if (!retval || TREE_CODE (retval) != INTEGER_CST)
12299 return NULL_TREE;
12300 }
12301 call = build_call_expr_loc (loc, fn, 2, dest, orig);
12302 }
12303
12304 if (call && retval)
12305 {
12306 retval = fold_convert_loc
12307 (loc, TREE_TYPE (TREE_TYPE (builtin_decl_implicit (BUILT_IN_SPRINTF))),
12308 retval);
12309 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
12310 }
12311 else
12312 return call;
12313 }
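
/* For illustration, a sketch of the two sprintf folds above on
   hypothetical buffers:

     sprintf (buf, "hello");     // -> strcpy (buf, "hello"), with the
                                 //    value 5 if the result is used
     sprintf (buf, "%s", name);  // -> strcpy (buf, name); only when the
                                 //    result is unused or strlen (name)
                                 //    is a compile-time constant

   A call such as sprintf (buf, "%d", i) is left for the library.  */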
12314
12315 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
12316 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
12317 attempt to simplify calls with more than 4 arguments.
12318
12319 Return NULL_TREE if no simplification was possible, otherwise return the
12320 simplified form of the call as a tree. If IGNORED is true, it means that
12321 the caller does not use the returned value of the function. */
12322
12323 static tree
12324 fold_builtin_snprintf (location_t loc, tree dest, tree destsize, tree fmt,
12325 tree orig, int ignored)
12326 {
12327 tree call, retval;
12328 const char *fmt_str = NULL;
12329 unsigned HOST_WIDE_INT destlen;
12330
12331 /* Verify the required arguments in the original call. We deal with two
12332 types of snprintf() calls: 'snprintf (str, cst, fmt)' and
12333 'snprintf (dest, cst, "%s", orig)'. */
12334 if (!validate_arg (dest, POINTER_TYPE)
12335 || !validate_arg (destsize, INTEGER_TYPE)
12336 || !validate_arg (fmt, POINTER_TYPE))
12337 return NULL_TREE;
12338 if (orig && !validate_arg (orig, POINTER_TYPE))
12339 return NULL_TREE;
12340
12341 if (!host_integerp (destsize, 1))
12342 return NULL_TREE;
12343
12344 /* Check whether the format is a literal string constant. */
12345 fmt_str = c_getstr (fmt);
12346 if (fmt_str == NULL)
12347 return NULL_TREE;
12348
12349 call = NULL_TREE;
12350 retval = NULL_TREE;
12351
12352 if (!init_target_chars ())
12353 return NULL_TREE;
12354
12355 destlen = tree_low_cst (destsize, 1);
12356
12357 /* If the format doesn't contain % args or %%, use strcpy. */
12358 if (strchr (fmt_str, target_percent) == NULL)
12359 {
12360 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12361 size_t len = strlen (fmt_str);
12362
12363 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
12364 if (orig)
12365 return NULL_TREE;
12366
12367 /* We could expand this as
12368 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
12369 or to
12370 memcpy (str, fmt_with_nul_at_cstm1, cst);
12371 but in the former case that might increase code size
12372 and in the latter case grow .rodata section too much.
12373 So punt for now. */
12374 if (len >= destlen)
12375 return NULL_TREE;
12376
12377 if (!fn)
12378 return NULL_TREE;
12379
12380 /* Convert snprintf (str, cst, fmt) into strcpy (str, fmt) when
12381 'format' is known to contain no % formats and
12382 strlen (fmt) < cst. */
12383 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
12384
12385 if (!ignored)
12386 retval = build_int_cst (integer_type_node, strlen (fmt_str));
12387 }
12388
12389 /* If the format is "%s", use strcpy if the result isn't used. */
12390 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
12391 {
12392 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12393 unsigned HOST_WIDE_INT origlen;
12394
12395 /* Don't crash on snprintf (str1, cst, "%s"). */
12396 if (!orig)
12397 return NULL_TREE;
12398
12399 retval = c_strlen (orig, 1);
12400 if (!retval || !host_integerp (retval, 1))
12401 return NULL_TREE;
12402
12403 origlen = tree_low_cst (retval, 1);
12404 /* We could expand this as
12405 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
12406 or to
12407 memcpy (str1, str2_with_nul_at_cstm1, cst);
12408 but in the former case that might increase code size
12409 and in the latter case grow .rodata section too much.
12410 So punt for now. */
12411 if (origlen >= destlen)
12412 return NULL_TREE;
12413
12414 /* Convert snprintf (str1, cst, "%s", str2) into
12415 strcpy (str1, str2) if strlen (str2) < cst. */
12416 if (!fn)
12417 return NULL_TREE;
12418
12419 call = build_call_expr_loc (loc, fn, 2, dest, orig);
12420
12421 if (ignored)
12422 retval = NULL_TREE;
12423 }
12424
12425 if (call && retval)
12426 {
12427 tree fn = builtin_decl_explicit (BUILT_IN_SNPRINTF);
12428 retval = fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fn)), retval);
12429 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
12430 }
12431 else
12432 return call;
12433 }
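
/* For illustration, a sketch of the snprintf folds above, assuming a
   hypothetical char buf[8] so DESTSIZE is the constant 8:

     snprintf (buf, 8, "abc");        // 3 < 8: -> strcpy (buf, "abc")
     snprintf (buf, 8, "%s", "xyz");  // 3 < 8: -> strcpy (buf, "xyz")
     snprintf (buf, 4, "abcdef");     // would truncate: left alone,
                                      // per the code-size notes above  */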
12434
12435 /* Expand a call EXP to __builtin_object_size. */
12436
12437 rtx
12438 expand_builtin_object_size (tree exp)
12439 {
12440 tree ost;
12441 int object_size_type;
12442 tree fndecl = get_callee_fndecl (exp);
12443
12444 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
12445 {
12446 error ("%Kfirst argument of %D must be a pointer, second integer constant",
12447 exp, fndecl);
12448 expand_builtin_trap ();
12449 return const0_rtx;
12450 }
12451
12452 ost = CALL_EXPR_ARG (exp, 1);
12453 STRIP_NOPS (ost);
12454
12455 if (TREE_CODE (ost) != INTEGER_CST
12456 || tree_int_cst_sgn (ost) < 0
12457 || compare_tree_int (ost, 3) > 0)
12458 {
12459 error ("%Klast argument of %D is not integer constant between 0 and 3",
12460 exp, fndecl);
12461 expand_builtin_trap ();
12462 return const0_rtx;
12463 }
12464
12465 object_size_type = tree_low_cst (ost, 0);
12466
12467 return object_size_type < 2 ? constm1_rtx : const0_rtx;
12468 }
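
/* For illustration, the fallback constants produced above once the
   object-size pass has run and nothing better is known about a
   hypothetical pointer p:

     __builtin_object_size (p, 0)  // -> (size_t) -1  (maximum estimate)
     __builtin_object_size (p, 1)  // -> (size_t) -1
     __builtin_object_size (p, 2)  // -> (size_t) 0   (minimum estimate)
     __builtin_object_size (p, 3)  // -> (size_t) 0  */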
12469
12470 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12471 FCODE is the BUILT_IN_* to use.
12472 Return NULL_RTX if we failed; the caller should emit a normal call,
12473 otherwise try to get the result in TARGET, if convenient (and in
12474 mode MODE if that's convenient). */
12475
12476 static rtx
12477 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
12478 enum built_in_function fcode)
12479 {
12480 tree dest, src, len, size;
12481
12482 if (!validate_arglist (exp,
12483 POINTER_TYPE,
12484 fcode == BUILT_IN_MEMSET_CHK
12485 ? INTEGER_TYPE : POINTER_TYPE,
12486 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
12487 return NULL_RTX;
12488
12489 dest = CALL_EXPR_ARG (exp, 0);
12490 src = CALL_EXPR_ARG (exp, 1);
12491 len = CALL_EXPR_ARG (exp, 2);
12492 size = CALL_EXPR_ARG (exp, 3);
12493
12494 if (! host_integerp (size, 1))
12495 return NULL_RTX;
12496
12497 if (host_integerp (len, 1) || integer_all_onesp (size))
12498 {
12499 tree fn;
12500
12501 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
12502 {
12503 warning_at (tree_nonartificial_location (exp),
12504 0, "%Kcall to %D will always overflow destination buffer",
12505 exp, get_callee_fndecl (exp));
12506 return NULL_RTX;
12507 }
12508
12509 fn = NULL_TREE;
12510 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12511 mem{cpy,pcpy,move,set} is available. */
12512 switch (fcode)
12513 {
12514 case BUILT_IN_MEMCPY_CHK:
12515 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
12516 break;
12517 case BUILT_IN_MEMPCPY_CHK:
12518 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
12519 break;
12520 case BUILT_IN_MEMMOVE_CHK:
12521 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
12522 break;
12523 case BUILT_IN_MEMSET_CHK:
12524 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
12525 break;
12526 default:
12527 break;
12528 }
12529
12530 if (! fn)
12531 return NULL_RTX;
12532
12533 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
12534 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
12535 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12536 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12537 }
12538 else if (fcode == BUILT_IN_MEMSET_CHK)
12539 return NULL_RTX;
12540 else
12541 {
12542 unsigned int dest_align = get_pointer_alignment (dest);
12543
12544 /* If DEST is not a pointer type, call the normal function. */
12545 if (dest_align == 0)
12546 return NULL_RTX;
12547
12548 /* If SRC and DEST are the same (and not volatile), do nothing. */
12549 if (operand_equal_p (src, dest, 0))
12550 {
12551 tree expr;
12552
12553 if (fcode != BUILT_IN_MEMPCPY_CHK)
12554 {
12555 /* Evaluate and ignore LEN in case it has side-effects. */
12556 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
12557 return expand_expr (dest, target, mode, EXPAND_NORMAL);
12558 }
12559
12560 expr = fold_build_pointer_plus (dest, len);
12561 return expand_expr (expr, target, mode, EXPAND_NORMAL);
12562 }
12563
12564 /* __memmove_chk special case. */
12565 if (fcode == BUILT_IN_MEMMOVE_CHK)
12566 {
12567 unsigned int src_align = get_pointer_alignment (src);
12568
12569 if (src_align == 0)
12570 return NULL_RTX;
12571
12572 /* If src is categorized for a readonly section we can use
12573 normal __memcpy_chk. */
12574 if (readonly_data_expr (src))
12575 {
12576 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12577 if (!fn)
12578 return NULL_RTX;
12579 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
12580 dest, src, len, size);
12581 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
12582 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12583 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12584 }
12585 }
12586 return NULL_RTX;
12587 }
12588 }
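
/* For illustration, how the expansion above treats a hypothetical
   char buf[8] whose object size 8 reached the SIZE argument:

     __memcpy_chk (buf, src, 4, 8);   // 4 <= 8: expands as memcpy
     __memcpy_chk (buf, src, 16, 8);  // warns "will always overflow",
                                      // and the _chk call is kept so
                                      // the runtime check still fires
     __memcpy_chk (buf, src, n, -1);  // size unknown: plain memcpy  */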
12589
12590 /* Emit warning if a buffer overflow is detected at compile time. */
12591
12592 static void
12593 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
12594 {
12595 int is_strlen = 0;
12596 tree len, size;
12597 location_t loc = tree_nonartificial_location (exp);
12598
12599 switch (fcode)
12600 {
12601 case BUILT_IN_STRCPY_CHK:
12602 case BUILT_IN_STPCPY_CHK:
12603 /* For __strcat_chk the warning will be emitted only if overflowing
12604 by at least strlen (dest) + 1 bytes. */
12605 case BUILT_IN_STRCAT_CHK:
12606 len = CALL_EXPR_ARG (exp, 1);
12607 size = CALL_EXPR_ARG (exp, 2);
12608 is_strlen = 1;
12609 break;
12610 case BUILT_IN_STRNCAT_CHK:
12611 case BUILT_IN_STRNCPY_CHK:
12612 case BUILT_IN_STPNCPY_CHK:
12613 len = CALL_EXPR_ARG (exp, 2);
12614 size = CALL_EXPR_ARG (exp, 3);
12615 break;
12616 case BUILT_IN_SNPRINTF_CHK:
12617 case BUILT_IN_VSNPRINTF_CHK:
12618 len = CALL_EXPR_ARG (exp, 1);
12619 size = CALL_EXPR_ARG (exp, 3);
12620 break;
12621 default:
12622 gcc_unreachable ();
12623 }
12624
12625 if (!len || !size)
12626 return;
12627
12628 if (! host_integerp (size, 1) || integer_all_onesp (size))
12629 return;
12630
12631 if (is_strlen)
12632 {
12633 len = c_strlen (len, 1);
12634 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
12635 return;
12636 }
12637 else if (fcode == BUILT_IN_STRNCAT_CHK)
12638 {
12639 tree src = CALL_EXPR_ARG (exp, 1);
12640 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
12641 return;
12642 src = c_strlen (src, 1);
12643 if (! src || ! host_integerp (src, 1))
12644 {
12645 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
12646 exp, get_callee_fndecl (exp));
12647 return;
12648 }
12649 else if (tree_int_cst_lt (src, size))
12650 return;
12651 }
12652 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
12653 return;
12654
12655 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
12656 exp, get_callee_fndecl (exp));
12657 }
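
/* For illustration, hypothetical code that reaches the warning above
   (with glibc fortification, e.g. -O2 -D_FORTIFY_SOURCE=2, strcpy is
   typically mapped to __strcpy_chk):

     char buf[4];
     strcpy (buf, "hello");  // __strcpy_chk (buf, "hello", 4):
                             // strlen ("hello") = 5 >= 4, so
                             // "will always overflow destination buffer"  */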
12658
12659 /* Emit warning if a buffer overflow is detected at compile time
12660 in __sprintf_chk/__vsprintf_chk calls. */
12661
12662 static void
12663 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
12664 {
12665 tree size, len, fmt;
12666 const char *fmt_str;
12667 int nargs = call_expr_nargs (exp);
12668
12669 /* Verify the required arguments in the original call. */
12670
12671 if (nargs < 4)
12672 return;
12673 size = CALL_EXPR_ARG (exp, 2);
12674 fmt = CALL_EXPR_ARG (exp, 3);
12675
12676 if (! host_integerp (size, 1) || integer_all_onesp (size))
12677 return;
12678
12679 /* Check whether the format is a literal string constant. */
12680 fmt_str = c_getstr (fmt);
12681 if (fmt_str == NULL)
12682 return;
12683
12684 if (!init_target_chars ())
12685 return;
12686
12687 /* If the format doesn't contain % args or %%, we know its size. */
12688 if (strchr (fmt_str, target_percent) == 0)
12689 len = build_int_cstu (size_type_node, strlen (fmt_str));
12690   /* If the format is "%s" and the first ... argument is a string literal,
12691      we know the size too.  */
12692 else if (fcode == BUILT_IN_SPRINTF_CHK
12693 && strcmp (fmt_str, target_percent_s) == 0)
12694 {
12695 tree arg;
12696
12697 if (nargs < 5)
12698 return;
12699 arg = CALL_EXPR_ARG (exp, 4);
12700 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
12701 return;
12702
12703 len = c_strlen (arg, 1);
12704 if (!len || ! host_integerp (len, 1))
12705 return;
12706 }
12707 else
12708 return;
12709
12710 if (! tree_int_cst_lt (len, size))
12711 warning_at (tree_nonartificial_location (exp),
12712 0, "%Kcall to %D will always overflow destination buffer",
12713 exp, get_callee_fndecl (exp));
12714 }
12715
12716 /* Emit warning if a free is called with address of a variable. */
12717
12718 static void
12719 maybe_emit_free_warning (tree exp)
12720 {
12721 tree arg = CALL_EXPR_ARG (exp, 0);
12722
12723 STRIP_NOPS (arg);
12724 if (TREE_CODE (arg) != ADDR_EXPR)
12725 return;
12726
12727 arg = get_base_address (TREE_OPERAND (arg, 0));
12728 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
12729 return;
12730
12731 if (SSA_VAR_P (arg))
12732 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
12733 "%Kattempt to free a non-heap object %qD", exp, arg);
12734 else
12735 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
12736 "%Kattempt to free a non-heap object", exp);
12737 }
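
/* For illustration, hypothetical code that triggers the warning above:

     int x;
     free (&x);     // "attempt to free a non-heap object 'x'"

     int *p = malloc (sizeof *p);
     free (p);      // fine: not an ADDR_EXPR, no warning  */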
12738
12739 /* Fold a call to __builtin_object_size with arguments PTR and OST,
12740 if possible. */
12741
12742 tree
12743 fold_builtin_object_size (tree ptr, tree ost)
12744 {
12745 unsigned HOST_WIDE_INT bytes;
12746 int object_size_type;
12747
12748 if (!validate_arg (ptr, POINTER_TYPE)
12749 || !validate_arg (ost, INTEGER_TYPE))
12750 return NULL_TREE;
12751
12752 STRIP_NOPS (ost);
12753
12754 if (TREE_CODE (ost) != INTEGER_CST
12755 || tree_int_cst_sgn (ost) < 0
12756 || compare_tree_int (ost, 3) > 0)
12757 return NULL_TREE;
12758
12759 object_size_type = tree_low_cst (ost, 0);
12760
12761 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
12762 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
12763 and (size_t) 0 for types 2 and 3. */
12764 if (TREE_SIDE_EFFECTS (ptr))
12765 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
12766
12767 if (TREE_CODE (ptr) == ADDR_EXPR)
12768 {
12769 bytes = compute_builtin_object_size (ptr, object_size_type);
12770 if (double_int_fits_to_tree_p (size_type_node,
12771 double_int::from_uhwi (bytes)))
12772 return build_int_cstu (size_type_node, bytes);
12773 }
12774 else if (TREE_CODE (ptr) == SSA_NAME)
12775 {
12776 /* If object size is not known yet, delay folding until
12777 later. Maybe subsequent passes will help determining
12778 it. */
12779 bytes = compute_builtin_object_size (ptr, object_size_type);
12780 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
12781 && double_int_fits_to_tree_p (size_type_node,
12782 double_int::from_uhwi (bytes)))
12783 return build_int_cstu (size_type_node, bytes);
12784 }
12785
12786 return NULL_TREE;
12787 }
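
/* For illustration, folds the function above performs on an ADDR_EXPR,
   assuming a hypothetical char buf[64] visible to the object-size
   machinery:

     __builtin_object_size (&buf, 0)      // -> 64
     __builtin_object_size (&buf[16], 0)  // -> 48
     __builtin_object_size (p++, 0)       // side effects: (size_t) -1  */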
12788
12789 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12790 DEST, SRC, LEN, and SIZE are the arguments to the call.
12791    IGNORE is true if the return value can be ignored.  FCODE is the
12792    BUILT_IN_* code of the builtin.  If MAXLEN is not NULL, it is the
12793    maximum length passed as the third argument.  */
12794
12795 tree
12796 fold_builtin_memory_chk (location_t loc, tree fndecl,
12797 tree dest, tree src, tree len, tree size,
12798 tree maxlen, bool ignore,
12799 enum built_in_function fcode)
12800 {
12801 tree fn;
12802
12803 if (!validate_arg (dest, POINTER_TYPE)
12804 || !validate_arg (src,
12805 (fcode == BUILT_IN_MEMSET_CHK
12806 ? INTEGER_TYPE : POINTER_TYPE))
12807 || !validate_arg (len, INTEGER_TYPE)
12808 || !validate_arg (size, INTEGER_TYPE))
12809 return NULL_TREE;
12810
12811 /* If SRC and DEST are the same (and not volatile), return DEST
12812 (resp. DEST+LEN for __mempcpy_chk). */
12813 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
12814 {
12815 if (fcode != BUILT_IN_MEMPCPY_CHK)
12816 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12817 dest, len);
12818 else
12819 {
12820 tree temp = fold_build_pointer_plus_loc (loc, dest, len);
12821 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
12822 }
12823 }
12824
12825 if (! host_integerp (size, 1))
12826 return NULL_TREE;
12827
12828 if (! integer_all_onesp (size))
12829 {
12830 if (! host_integerp (len, 1))
12831 {
12832 /* If LEN is not constant, try MAXLEN too.
12833 For MAXLEN only allow optimizing into non-_ocs function
12834 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12835 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12836 {
12837 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
12838 {
12839 /* (void) __mempcpy_chk () can be optimized into
12840 (void) __memcpy_chk (). */
12841 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12842 if (!fn)
12843 return NULL_TREE;
12844
12845 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
12846 }
12847 return NULL_TREE;
12848 }
12849 }
12850 else
12851 maxlen = len;
12852
12853 if (tree_int_cst_lt (size, maxlen))
12854 return NULL_TREE;
12855 }
12856
12857 fn = NULL_TREE;
12858 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12859 mem{cpy,pcpy,move,set} is available. */
12860 switch (fcode)
12861 {
12862 case BUILT_IN_MEMCPY_CHK:
12863 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
12864 break;
12865 case BUILT_IN_MEMPCPY_CHK:
12866 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
12867 break;
12868 case BUILT_IN_MEMMOVE_CHK:
12869 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
12870 break;
12871 case BUILT_IN_MEMSET_CHK:
12872 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
12873 break;
12874 default:
12875 break;
12876 }
12877
12878 if (!fn)
12879 return NULL_TREE;
12880
12881 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12882 }
12883
12884 /* Fold a call to the __st[rp]cpy_chk builtin.
12885 DEST, SRC, and SIZE are the arguments to the call.
12886    IGNORE is true if the return value can be ignored.  FCODE is the
12887    BUILT_IN_* code of the builtin.  If MAXLEN is not NULL, it is the
12888    maximum length of the string passed as the second argument.  */
12889
12890 tree
12891 fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
12892 tree src, tree size,
12893 tree maxlen, bool ignore,
12894 enum built_in_function fcode)
12895 {
12896 tree len, fn;
12897
12898 if (!validate_arg (dest, POINTER_TYPE)
12899 || !validate_arg (src, POINTER_TYPE)
12900 || !validate_arg (size, INTEGER_TYPE))
12901 return NULL_TREE;
12902
12903 /* If SRC and DEST are the same (and not volatile), return DEST. */
12904 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12905 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
12906
12907 if (! host_integerp (size, 1))
12908 return NULL_TREE;
12909
12910 if (! integer_all_onesp (size))
12911 {
12912 len = c_strlen (src, 1);
12913 if (! len || ! host_integerp (len, 1))
12914 {
12915 /* If LEN is not constant, try MAXLEN too.
12916 For MAXLEN only allow optimizing into non-_ocs function
12917 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12918 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12919 {
12920 if (fcode == BUILT_IN_STPCPY_CHK)
12921 {
12922 if (! ignore)
12923 return NULL_TREE;
12924
12925 /* If return value of __stpcpy_chk is ignored,
12926 optimize into __strcpy_chk. */
12927 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
12928 if (!fn)
12929 return NULL_TREE;
12930
12931 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12932 }
12933
12934 if (! len || TREE_SIDE_EFFECTS (len))
12935 return NULL_TREE;
12936
12937 /* If c_strlen returned something, but not a constant,
12938 transform __strcpy_chk into __memcpy_chk. */
12939 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12940 if (!fn)
12941 return NULL_TREE;
12942
12943 len = fold_convert_loc (loc, size_type_node, len);
12944 len = size_binop_loc (loc, PLUS_EXPR, len,
12945 build_int_cst (size_type_node, 1));
12946 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12947 build_call_expr_loc (loc, fn, 4,
12948 dest, src, len, size));
12949 }
12950 }
12951 else
12952 maxlen = len;
12953
12954 if (! tree_int_cst_lt (maxlen, size))
12955 return NULL_TREE;
12956 }
12957
12958 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12959 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
12960 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
12961 if (!fn)
12962 return NULL_TREE;
12963
12964 return build_call_expr_loc (loc, fn, 2, dest, src);
12965 }
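
/* For illustration, a sketch of the folds above, assuming a
   hypothetical char buf[8] so SIZE is the constant 8:

     __strcpy_chk (buf, "abc", 8);  // 3 < 8: -> strcpy (buf, "abc")
     __strcpy_chk (buf, s, 8);      // if c_strlen (s) is non-constant
                                    // but side-effect free, becomes
                                    // __memcpy_chk (buf, s, len + 1, 8);
                                    // otherwise the call is kept  */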
12966
12967 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
12968    are the arguments to the call.  If MAXLEN is not NULL, it is the maximum
12969    length passed as the third argument.  IGNORE is true if the return value
12970    can be ignored.  FCODE is the BUILT_IN_* code of the builtin.  */
12971
12972 tree
12973 fold_builtin_stxncpy_chk (location_t loc, tree dest, tree src,
12974 tree len, tree size, tree maxlen, bool ignore,
12975 enum built_in_function fcode)
12976 {
12977 tree fn;
12978
12979 if (!validate_arg (dest, POINTER_TYPE)
12980 || !validate_arg (src, POINTER_TYPE)
12981 || !validate_arg (len, INTEGER_TYPE)
12982 || !validate_arg (size, INTEGER_TYPE))
12983 return NULL_TREE;
12984
12985 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
12986 {
12987 /* If return value of __stpncpy_chk is ignored,
12988 optimize into __strncpy_chk. */
12989 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
12990 if (fn)
12991 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
12992 }
12993
12994 if (! host_integerp (size, 1))
12995 return NULL_TREE;
12996
12997 if (! integer_all_onesp (size))
12998 {
12999 if (! host_integerp (len, 1))
13000 {
13001 /* If LEN is not constant, try MAXLEN too.
13002 For MAXLEN only allow optimizing into non-_ocs function
13003 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13004 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
13005 return NULL_TREE;
13006 }
13007 else
13008 maxlen = len;
13009
13010 if (tree_int_cst_lt (size, maxlen))
13011 return NULL_TREE;
13012 }
13013
13014 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
13015 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
13016 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
13017 if (!fn)
13018 return NULL_TREE;
13019
13020 return build_call_expr_loc (loc, fn, 3, dest, src, len);
13021 }
13022
13023 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
13024 are the arguments to the call. */
13025
13026 static tree
13027 fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
13028 tree src, tree size)
13029 {
13030 tree fn;
13031 const char *p;
13032
13033 if (!validate_arg (dest, POINTER_TYPE)
13034 || !validate_arg (src, POINTER_TYPE)
13035 || !validate_arg (size, INTEGER_TYPE))
13036 return NULL_TREE;
13037
13038 p = c_getstr (src);
13039 /* If the SRC parameter is "", return DEST. */
13040 if (p && *p == '\0')
13041 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
13042
13043 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
13044 return NULL_TREE;
13045
13046 /* If __builtin_strcat_chk is used, assume strcat is available. */
13047 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
13048 if (!fn)
13049 return NULL_TREE;
13050
13051 return build_call_expr_loc (loc, fn, 2, dest, src);
13052 }
13053
13054 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
13055 LEN, and SIZE. */
13056
13057 static tree
13058 fold_builtin_strncat_chk (location_t loc, tree fndecl,
13059 tree dest, tree src, tree len, tree size)
13060 {
13061 tree fn;
13062 const char *p;
13063
13064 if (!validate_arg (dest, POINTER_TYPE)
13065 || !validate_arg (src, POINTER_TYPE)
13066       || !validate_arg (len, INTEGER_TYPE)
13067       || !validate_arg (size, INTEGER_TYPE))
13068 return NULL_TREE;
13069
13070 p = c_getstr (src);
13071 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
13072 if (p && *p == '\0')
13073 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
13074 else if (integer_zerop (len))
13075 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
13076
13077 if (! host_integerp (size, 1))
13078 return NULL_TREE;
13079
13080 if (! integer_all_onesp (size))
13081 {
13082 tree src_len = c_strlen (src, 1);
13083 if (src_len
13084 && host_integerp (src_len, 1)
13085 && host_integerp (len, 1)
13086 && ! tree_int_cst_lt (len, src_len))
13087 {
13088 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
13089 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
13090 if (!fn)
13091 return NULL_TREE;
13092
13093 return build_call_expr_loc (loc, fn, 3, dest, src, size);
13094 }
13095 return NULL_TREE;
13096 }
13097
13098 /* If __builtin_strncat_chk is used, assume strncat is available. */
13099 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
13100 if (!fn)
13101 return NULL_TREE;
13102
13103 return build_call_expr_loc (loc, fn, 3, dest, src, len);
13104 }
13105
13106 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
13107 Return NULL_TREE if a normal call should be emitted rather than
13108 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
13109 or BUILT_IN_VSPRINTF_CHK. */
13110
13111 static tree
13112 fold_builtin_sprintf_chk_1 (location_t loc, int nargs, tree *args,
13113 enum built_in_function fcode)
13114 {
13115 tree dest, size, len, fn, fmt, flag;
13116 const char *fmt_str;
13117
13118 /* Verify the required arguments in the original call. */
13119 if (nargs < 4)
13120 return NULL_TREE;
13121 dest = args[0];
13122 if (!validate_arg (dest, POINTER_TYPE))
13123 return NULL_TREE;
13124 flag = args[1];
13125 if (!validate_arg (flag, INTEGER_TYPE))
13126 return NULL_TREE;
13127 size = args[2];
13128 if (!validate_arg (size, INTEGER_TYPE))
13129 return NULL_TREE;
13130 fmt = args[3];
13131 if (!validate_arg (fmt, POINTER_TYPE))
13132 return NULL_TREE;
13133
13134 if (! host_integerp (size, 1))
13135 return NULL_TREE;
13136
13137 len = NULL_TREE;
13138
13139 if (!init_target_chars ())
13140 return NULL_TREE;
13141
13142 /* Check whether the format is a literal string constant. */
13143 fmt_str = c_getstr (fmt);
13144 if (fmt_str != NULL)
13145 {
13146 /* If the format doesn't contain % args or %%, we know the size. */
13147 if (strchr (fmt_str, target_percent) == 0)
13148 {
13149 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
13150 len = build_int_cstu (size_type_node, strlen (fmt_str));
13151 }
13152       /* If the format is "%s" and the first ... argument is a string literal,
13153 we know the size too. */
13154 else if (fcode == BUILT_IN_SPRINTF_CHK
13155 && strcmp (fmt_str, target_percent_s) == 0)
13156 {
13157 tree arg;
13158
13159 if (nargs == 5)
13160 {
13161 arg = args[4];
13162 if (validate_arg (arg, POINTER_TYPE))
13163 {
13164 len = c_strlen (arg, 1);
13165 if (! len || ! host_integerp (len, 1))
13166 len = NULL_TREE;
13167 }
13168 }
13169 }
13170 }
13171
13172 if (! integer_all_onesp (size))
13173 {
13174 if (! len || ! tree_int_cst_lt (len, size))
13175 return NULL_TREE;
13176 }
13177
13178 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
13179 or if format doesn't contain % chars or is "%s". */
13180 if (! integer_zerop (flag))
13181 {
13182 if (fmt_str == NULL)
13183 return NULL_TREE;
13184 if (strchr (fmt_str, target_percent) != NULL
13185 && strcmp (fmt_str, target_percent_s))
13186 return NULL_TREE;
13187 }
13188
13189 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
13190 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
13191 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
13192 if (!fn)
13193 return NULL_TREE;
13194
13195 return rewrite_call_expr_array (loc, nargs, args, 4, fn, 2, dest, fmt);
13196 }
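
/* For illustration, a sketch of the folds above, assuming a
   hypothetical char buf[8] (so SIZE is 8) and FLAG 0:

     __sprintf_chk (buf, 0, 8, "hi");         // 2 < 8: -> sprintf
     __sprintf_chk (buf, 0, 8, "%s", "xyz");  // 3 < 8: -> sprintf
     __sprintf_chk (buf, 0, 8, "%d", i);      // length unknown: kept

   The resulting plain sprintf may then be folded further (e.g. into
   strcpy) by fold_builtin_sprintf above.  */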
13197
13198 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
13199 a normal call should be emitted rather than expanding the function
13200 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13201
13202 static tree
13203 fold_builtin_sprintf_chk (location_t loc, tree exp,
13204 enum built_in_function fcode)
13205 {
13206 return fold_builtin_sprintf_chk_1 (loc, call_expr_nargs (exp),
13207 CALL_EXPR_ARGP (exp), fcode);
13208 }
13209
13210 /* Fold a call EXP to __{,v}snprintf_chk having NARGS passed as ARGS.  Return
13211 NULL_TREE if a normal call should be emitted rather than expanding
13212 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13213    BUILT_IN_VSNPRINTF_CHK.  If MAXLEN is not NULL, it is the maximum length
13214    passed as the second argument.  */
13215
13216 static tree
13217 fold_builtin_snprintf_chk_1 (location_t loc, int nargs, tree *args,
13218 tree maxlen, enum built_in_function fcode)
13219 {
13220 tree dest, size, len, fn, fmt, flag;
13221 const char *fmt_str;
13222
13223 /* Verify the required arguments in the original call. */
13224 if (nargs < 5)
13225 return NULL_TREE;
13226 dest = args[0];
13227 if (!validate_arg (dest, POINTER_TYPE))
13228 return NULL_TREE;
13229 len = args[1];
13230 if (!validate_arg (len, INTEGER_TYPE))
13231 return NULL_TREE;
13232 flag = args[2];
13233 if (!validate_arg (flag, INTEGER_TYPE))
13234 return NULL_TREE;
13235 size = args[3];
13236 if (!validate_arg (size, INTEGER_TYPE))
13237 return NULL_TREE;
13238 fmt = args[4];
13239 if (!validate_arg (fmt, POINTER_TYPE))
13240 return NULL_TREE;
13241
13242 if (! host_integerp (size, 1))
13243 return NULL_TREE;
13244
13245 if (! integer_all_onesp (size))
13246 {
13247 if (! host_integerp (len, 1))
13248 {
13249 /* If LEN is not constant, try MAXLEN too.
13250 For MAXLEN only allow optimizing into non-_ocs function
13251 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13252 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
13253 return NULL_TREE;
13254 }
13255 else
13256 maxlen = len;
13257
13258 if (tree_int_cst_lt (size, maxlen))
13259 return NULL_TREE;
13260 }
13261
13262 if (!init_target_chars ())
13263 return NULL_TREE;
13264
13265 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13266 or if format doesn't contain % chars or is "%s". */
13267 if (! integer_zerop (flag))
13268 {
13269 fmt_str = c_getstr (fmt);
13270 if (fmt_str == NULL)
13271 return NULL_TREE;
13272 if (strchr (fmt_str, target_percent) != NULL
13273 && strcmp (fmt_str, target_percent_s))
13274 return NULL_TREE;
13275 }
13276
13277 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13278 available. */
13279 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
13280 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
13281 if (!fn)
13282 return NULL_TREE;
13283
13284 return rewrite_call_expr_array (loc, nargs, args, 5, fn, 3, dest, len, fmt);
13285 }
13286
13287 /* Fold a call EXP to __{,v}snprintf_chk.  Return NULL_TREE if
13288 a normal call should be emitted rather than expanding the function
13289 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13290    BUILT_IN_VSNPRINTF_CHK.  If MAXLEN is not NULL, it is the maximum length
13291    passed as the second argument.  */
13292
13293 tree
13294 fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
13295 enum built_in_function fcode)
13296 {
13297 return fold_builtin_snprintf_chk_1 (loc, call_expr_nargs (exp),
13298 CALL_EXPR_ARGP (exp), maxlen, fcode);
13299 }
13300
13301 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
13302 FMT and ARG are the arguments to the call; we don't fold cases with
13303 more than 2 arguments, and ARG may be null if this is a 1-argument case.
13304
13305 Return NULL_TREE if no simplification was possible, otherwise return the
13306 simplified form of the call as a tree. FCODE is the BUILT_IN_*
13307 code of the function to be simplified. */
13308
13309 static tree
13310 fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
13311 tree arg, bool ignore,
13312 enum built_in_function fcode)
13313 {
13314 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
13315 const char *fmt_str = NULL;
13316
13317 /* If the return value is used, don't do the transformation. */
13318 if (! ignore)
13319 return NULL_TREE;
13320
13321 /* Verify the required arguments in the original call. */
13322 if (!validate_arg (fmt, POINTER_TYPE))
13323 return NULL_TREE;
13324
13325 /* Check whether the format is a literal string constant. */
13326 fmt_str = c_getstr (fmt);
13327 if (fmt_str == NULL)
13328 return NULL_TREE;
13329
13330 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
13331 {
13332 /* If we're using an unlocked function, assume the other
13333 unlocked functions exist explicitly. */
13334 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
13335 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
13336 }
13337 else
13338 {
13339 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
13340 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
13341 }
13342
13343 if (!init_target_chars ())
13344 return NULL_TREE;
13345
13346 if (strcmp (fmt_str, target_percent_s) == 0
13347 || strchr (fmt_str, target_percent) == NULL)
13348 {
13349 const char *str;
13350
13351 if (strcmp (fmt_str, target_percent_s) == 0)
13352 {
13353 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
13354 return NULL_TREE;
13355
13356 if (!arg || !validate_arg (arg, POINTER_TYPE))
13357 return NULL_TREE;
13358
13359 str = c_getstr (arg);
13360 if (str == NULL)
13361 return NULL_TREE;
13362 }
13363 else
13364 {
13365 /* The format specifier doesn't contain any '%' characters. */
13366 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
13367 && arg)
13368 return NULL_TREE;
13369 str = fmt_str;
13370 }
13371
13372 /* If the string was "", printf does nothing. */
13373 if (str[0] == '\0')
13374 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
13375
13376 /* If the string has length of 1, call putchar. */
13377 if (str[1] == '\0')
13378 {
13379 	  /* Given printf ("c"), where c is any one character,
13380 	     convert "c"[0] to an int and pass that to the replacement
13381 	     function.  */
13382 newarg = build_int_cst (integer_type_node, str[0]);
13383 if (fn_putchar)
13384 call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
13385 }
13386 else
13387 {
13388 /* If the string was "string\n", call puts("string"). */
13389 size_t len = strlen (str);
13390 if ((unsigned char)str[len - 1] == target_newline
13391 && (size_t) (int) len == len
13392 && (int) len > 0)
13393 {
13394 char *newstr;
13395 tree offset_node, string_cst;
13396
13397 /* Create a NUL-terminated string that's one char shorter
13398 than the original, stripping off the trailing '\n'. */
13399 newarg = build_string_literal (len, str);
13400 string_cst = string_constant (newarg, &offset_node);
13401 gcc_checking_assert (string_cst
13402 && (TREE_STRING_LENGTH (string_cst)
13403 == (int) len)
13404 && integer_zerop (offset_node)
13405 && (unsigned char)
13406 TREE_STRING_POINTER (string_cst)[len - 1]
13407 == target_newline);
13408 /* build_string_literal creates a new STRING_CST,
13409 modify it in place to avoid double copying. */
13410 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
13411 newstr[len - 1] = '\0';
13412 if (fn_puts)
13413 call = build_call_expr_loc (loc, fn_puts, 1, newarg);
13414 }
13415 else
13416 /* We'd like to arrange to call fputs(string,stdout) here,
13417 but we need stdout and don't have a way to get it yet. */
13418 return NULL_TREE;
13419 }
13420 }
13421
13422 /* The other optimizations can be done only on the non-va_list variants. */
13423 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
13424 return NULL_TREE;
13425
13426 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
13427 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
13428 {
13429 if (!arg || !validate_arg (arg, POINTER_TYPE))
13430 return NULL_TREE;
13431 if (fn_puts)
13432 call = build_call_expr_loc (loc, fn_puts, 1, arg);
13433 }
13434
13435 /* If the format specifier was "%c", call __builtin_putchar(arg). */
13436 else if (strcmp (fmt_str, target_percent_c) == 0)
13437 {
13438 if (!arg || !validate_arg (arg, INTEGER_TYPE))
13439 return NULL_TREE;
13440 if (fn_putchar)
13441 call = build_call_expr_loc (loc, fn_putchar, 1, arg);
13442 }
13443
13444 if (!call)
13445 return NULL_TREE;
13446
13447 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
13448 }
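
/* For illustration, the printf folds above (results unused, as the
   fold requires; s and c are hypothetical):

     printf ("");          // folds to 0, no output
     printf ("\n");        // -> putchar ('\n')
     printf ("hello\n");   // -> puts ("hello")
     printf ("hello");     // no trailing newline: kept (fputs would
                           // need stdout, which we cannot name here)
     printf ("%s\n", s);   // -> puts (s)
     printf ("%c", c);     // -> putchar (c)  */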
13449
13450 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
13451 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
13452 more than 3 arguments, and ARG may be null in the 2-argument case.
13453
13454 Return NULL_TREE if no simplification was possible, otherwise return the
13455 simplified form of the call as a tree. FCODE is the BUILT_IN_*
13456 code of the function to be simplified. */
13457
13458 static tree
13459 fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
13460 tree fmt, tree arg, bool ignore,
13461 enum built_in_function fcode)
13462 {
13463 tree fn_fputc, fn_fputs, call = NULL_TREE;
13464 const char *fmt_str = NULL;
13465
13466 /* If the return value is used, don't do the transformation. */
13467 if (! ignore)
13468 return NULL_TREE;
13469
13470 /* Verify the required arguments in the original call. */
13471 if (!validate_arg (fp, POINTER_TYPE))
13472 return NULL_TREE;
13473 if (!validate_arg (fmt, POINTER_TYPE))
13474 return NULL_TREE;
13475
13476 /* Check whether the format is a literal string constant. */
13477 fmt_str = c_getstr (fmt);
13478 if (fmt_str == NULL)
13479 return NULL_TREE;
13480
13481 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
13482 {
13483 /* If we're using an unlocked function, assume the other
13484 unlocked functions exist explicitly. */
13485 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
13486 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
13487 }
13488 else
13489 {
13490 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
13491 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
13492 }
13493
13494 if (!init_target_chars ())
13495 return NULL_TREE;
13496
13497   /* If the format doesn't contain % args or %%, use fputs.  */
13498 if (strchr (fmt_str, target_percent) == NULL)
13499 {
13500 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
13501 && arg)
13502 return NULL_TREE;
13503
13504 /* If the format specifier was "", fprintf does nothing. */
13505 if (fmt_str[0] == '\0')
13506 {
13507 /* If FP has side-effects, just wait until gimplification is
13508 done. */
13509 if (TREE_SIDE_EFFECTS (fp))
13510 return NULL_TREE;
13511
13512 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
13513 }
13514
13515 /* When "string" doesn't contain %, replace all cases of
13516 fprintf (fp, string) with fputs (string, fp). The fputs
13517 builtin will take care of special cases like length == 1. */
13518 if (fn_fputs)
13519 call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
13520 }
13521
13522 /* The other optimizations can be done only on the non-va_list variants. */
13523 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
13524 return NULL_TREE;
13525
13526 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
13527 else if (strcmp (fmt_str, target_percent_s) == 0)
13528 {
13529 if (!arg || !validate_arg (arg, POINTER_TYPE))
13530 return NULL_TREE;
13531 if (fn_fputs)
13532 call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
13533 }
13534
13535 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
13536 else if (strcmp (fmt_str, target_percent_c) == 0)
13537 {
13538 if (!arg || !validate_arg (arg, INTEGER_TYPE))
13539 return NULL_TREE;
13540 if (fn_fputc)
13541 call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
13542 }
13543
13544 if (!call)
13545 return NULL_TREE;
13546 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
13547 }
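
/* For illustration, the fprintf folds above (results unused; fp, s, c
   and i are hypothetical):

     fprintf (fp, "");       // folds to 0 if fp has no side effects
     fprintf (fp, "hi");     // -> fputs ("hi", fp)
     fprintf (fp, "%s", s);  // -> fputs (s, fp)
     fprintf (fp, "%c", c);  // -> fputc (c, fp)
     fprintf (fp, "%d", i);  // kept  */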
13548
13549 /* Initialize format string characters in the target charset. */
13550
13551 static bool
13552 init_target_chars (void)
13553 {
13554 static bool init;
13555 if (!init)
13556 {
13557 target_newline = lang_hooks.to_target_charset ('\n');
13558 target_percent = lang_hooks.to_target_charset ('%');
13559 target_c = lang_hooks.to_target_charset ('c');
13560 target_s = lang_hooks.to_target_charset ('s');
13561 if (target_newline == 0 || target_percent == 0 || target_c == 0
13562 || target_s == 0)
13563 return false;
13564
13565 target_percent_c[0] = target_percent;
13566 target_percent_c[1] = target_c;
13567 target_percent_c[2] = '\0';
13568
13569 target_percent_s[0] = target_percent;
13570 target_percent_s[1] = target_s;
13571 target_percent_s[2] = '\0';
13572
13573 target_percent_s_newline[0] = target_percent;
13574 target_percent_s_newline[1] = target_s;
13575 target_percent_s_newline[2] = target_newline;
13576 target_percent_s_newline[3] = '\0';
13577
13578 init = true;
13579 }
13580 return true;
13581 }
13582
13583 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
13584 and no overflow/underflow occurred. INEXACT is true if M was not
13585 exactly calculated. TYPE is the tree type for the result. This
13586 function assumes that you cleared the MPFR flags and then
13587    calculated M, so that any flag that is now set was raised while
13588    computing M.  Return NULL_TREE if any checks fail.  */
13589
13590 static tree
13591 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
13592 {
13593 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13594 overflow/underflow occurred. If -frounding-math, proceed iff the
13595 result of calling FUNC was exact. */
13596 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
13597 && (!flag_rounding_math || !inexact))
13598 {
13599 REAL_VALUE_TYPE rr;
13600
13601 real_from_mpfr (&rr, m, type, GMP_RNDN);
13602 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
13603 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13604 but the mpft_t is not, then we underflowed in the
13605 	 but the mpfr_t is not, then we underflowed in the
13606 if (real_isfinite (&rr)
13607 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
13608 {
13609 REAL_VALUE_TYPE rmode;
13610
13611 real_convert (&rmode, TYPE_MODE (type), &rr);
13612 /* Proceed iff the specified mode can hold the value. */
13613 if (real_identical (&rmode, &rr))
13614 return build_real (type, rmode);
13615 }
13616 }
13617 return NULL_TREE;
13618 }
13619
13620 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
13621 number and no overflow/underflow occurred. INEXACT is true if M
13622 was not exactly calculated. TYPE is the tree type for the result.
13623 This function assumes that you cleared the MPFR flags and then
13624    calculated M, so that any flag that is now set was raised while
13625    computing M.  Return NULL_TREE if any checks fail; if
13626    FORCE_CONVERT is true, bypass the checks.  */
13627
13628 static tree
13629 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
13630 {
13631 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13632 overflow/underflow occurred. If -frounding-math, proceed iff the
13633 result of calling FUNC was exact. */
13634 if (force_convert
13635 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
13636 && !mpfr_overflow_p () && !mpfr_underflow_p ()
13637 && (!flag_rounding_math || !inexact)))
13638 {
13639 REAL_VALUE_TYPE re, im;
13640
13641 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
13642 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
13643 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
13644 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13645 	 but the mpfr_t is not, then we underflowed in the
13646 conversion. */
13647 if (force_convert
13648 || (real_isfinite (&re) && real_isfinite (&im)
13649 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
13650 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
13651 {
13652 REAL_VALUE_TYPE re_mode, im_mode;
13653
13654 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
13655 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
13656 /* Proceed iff the specified mode can hold the value. */
13657 if (force_convert
13658 || (real_identical (&re_mode, &re)
13659 && real_identical (&im_mode, &im)))
13660 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
13661 build_real (TREE_TYPE (type), im_mode));
13662 }
13663 }
13664 return NULL_TREE;
13665 }
13666
13667 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
13668 FUNC on it and return the resulting value as a tree with type TYPE.
13669 If MIN and/or MAX are not NULL, then the supplied ARG must be
13670 within those bounds. If INCLUSIVE is true, then MIN/MAX are
13671 acceptable values, otherwise they are not. The mpfr precision is
13672 set to the precision of TYPE. We assume that function FUNC returns
13673 zero if the result could be calculated exactly within the requested
13674 precision. */
13675
13676 static tree
13677 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
13678 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
13679 bool inclusive)
13680 {
13681 tree result = NULL_TREE;
13682
13683 STRIP_NOPS (arg);
13684
13685 /* To proceed, MPFR must exactly represent the target floating point
13686 format, which only happens when the target base equals two. */
13687 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13688 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
13689 {
13690 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13691
13692 if (real_isfinite (ra)
13693 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
13694 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
13695 {
13696 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13697 const int prec = fmt->p;
13698 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13699 int inexact;
13700 mpfr_t m;
13701
13702 mpfr_init2 (m, prec);
13703 mpfr_from_real (m, ra, GMP_RNDN);
13704 mpfr_clear_flags ();
13705 inexact = func (m, m, rnd);
13706 result = do_mpfr_ckconv (m, type, inexact);
13707 mpfr_clear (m);
13708 }
13709 }
13710
13711 return result;
13712 }
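
/* For illustration, a sketch of how the folder above is typically
   used: for a call such as

     double d = __builtin_sin (1.0);

   the math-builtin folder passes mpfr_sin with no MIN/MAX bounds, so
   the call becomes the correctly rounded REAL_CST for sin (1.0),
   provided do_mpfr_ckconv accepts the MPFR result.  Bounded cases
   (e.g. acos, whose argument must lie in [-1, 1]) supply MIN/MAX.  */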
13713
13714 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
13715 FUNC on it and return the resulting value as a tree with type TYPE.
13716 The mpfr precision is set to the precision of TYPE. We assume that
13717 function FUNC returns zero if the result could be calculated
13718 exactly within the requested precision. */
13719
13720 static tree
13721 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
13722 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13723 {
13724 tree result = NULL_TREE;
13725
13726 STRIP_NOPS (arg1);
13727 STRIP_NOPS (arg2);
13728
13729 /* To proceed, MPFR must exactly represent the target floating point
13730 format, which only happens when the target base equals two. */
13731 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13732 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13733 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13734 {
13735 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13736 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13737
13738 if (real_isfinite (ra1) && real_isfinite (ra2))
13739 {
13740 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13741 const int prec = fmt->p;
13742 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13743 int inexact;
13744 mpfr_t m1, m2;
13745
13746 mpfr_inits2 (prec, m1, m2, NULL);
13747 mpfr_from_real (m1, ra1, GMP_RNDN);
13748 mpfr_from_real (m2, ra2, GMP_RNDN);
13749 mpfr_clear_flags ();
13750 inexact = func (m1, m1, m2, rnd);
13751 result = do_mpfr_ckconv (m1, type, inexact);
13752 mpfr_clears (m1, m2, NULL);
13753 }
13754 }
13755
13756 return result;
13757 }
13758
13759 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
13760 FUNC on it and return the resulting value as a tree with type TYPE.
13761 The mpfr precision is set to the precision of TYPE. We assume that
13762 function FUNC returns zero if the result could be calculated
13763 exactly within the requested precision. */
13764
13765 static tree
13766 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
13767 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13768 {
13769 tree result = NULL_TREE;
13770
13771 STRIP_NOPS (arg1);
13772 STRIP_NOPS (arg2);
13773 STRIP_NOPS (arg3);
13774
13775 /* To proceed, MPFR must exactly represent the target floating point
13776 format, which only happens when the target base equals two. */
13777 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13778 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13779 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
13780 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
13781 {
13782 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13783 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13784 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
13785
13786 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
13787 {
13788 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13789 const int prec = fmt->p;
13790 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13791 int inexact;
13792 mpfr_t m1, m2, m3;
13793
13794 mpfr_inits2 (prec, m1, m2, m3, NULL);
13795 mpfr_from_real (m1, ra1, GMP_RNDN);
13796 mpfr_from_real (m2, ra2, GMP_RNDN);
13797 mpfr_from_real (m3, ra3, GMP_RNDN);
13798 mpfr_clear_flags ();
13799 inexact = func (m1, m1, m2, m3, rnd);
13800 result = do_mpfr_ckconv (m1, type, inexact);
13801 mpfr_clears (m1, m2, m3, NULL);
13802 }
13803 }
13804
13805 return result;
13806 }
13807
13808 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
13809 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
13810 If ARG_SINP and ARG_COSP are NULL then the result is returned
13811 as a complex value.
13812 The type is taken from the type of ARG and is used for setting the
13813 precision of the calculation and results. */
13814
13815 static tree
13816 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
13817 {
13818 tree const type = TREE_TYPE (arg);
13819 tree result = NULL_TREE;
13820
13821 STRIP_NOPS (arg);
13822
13823 /* To proceed, MPFR must exactly represent the target floating point
13824 format, which only happens when the target base equals two. */
13825 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13826 && TREE_CODE (arg) == REAL_CST
13827 && !TREE_OVERFLOW (arg))
13828 {
13829 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13830
13831 if (real_isfinite (ra))
13832 {
13833 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13834 const int prec = fmt->p;
13835 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13836 tree result_s, result_c;
13837 int inexact;
13838 mpfr_t m, ms, mc;
13839
13840 mpfr_inits2 (prec, m, ms, mc, NULL);
13841 mpfr_from_real (m, ra, GMP_RNDN);
13842 mpfr_clear_flags ();
13843 inexact = mpfr_sin_cos (ms, mc, m, rnd);
13844 result_s = do_mpfr_ckconv (ms, type, inexact);
13845 result_c = do_mpfr_ckconv (mc, type, inexact);
13846 mpfr_clears (m, ms, mc, NULL);
13847 if (result_s && result_c)
13848 {
13849 /* If we are to return the result as a complex value, do so. */
13850 if (!arg_sinp && !arg_cosp)
13851 return build_complex (build_complex_type (type),
13852 result_c, result_s);
13853
13854 /* Dereference the sin/cos pointer arguments. */
13855 arg_sinp = build_fold_indirect_ref (arg_sinp);
13856 arg_cosp = build_fold_indirect_ref (arg_cosp);
13857 /* Proceed iff valid pointer types were passed in. */
13858 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
13859 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
13860 {
13861 /* Set the values. */
13862 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
13863 result_s);
13864 TREE_SIDE_EFFECTS (result_s) = 1;
13865 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
13866 result_c);
13867 TREE_SIDE_EFFECTS (result_c) = 1;
13868 /* Combine the assignments into a compound expr. */
13869 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13870 result_s, result_c));
13871 }
13872 }
13873 }
13874 }
13875 return result;
13876 }
13877
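/* Worked example (illustrative): folding __builtin_cexpi (0.0) reaches
   here with NULL pointer arguments; mpfr_sin_cos yields sin == 0.0 and
   cos == 1.0 exactly, so the result is the COMPLEX_CST 1.0 + 0.0i
   (real part cos, imaginary part sin). For sincos (0.0, &s, &c) the
   same values are instead returned as a COMPOUND_EXPR of the two
   pointer assignments. */
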
13878 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13879 two-argument mpfr order-N Bessel function FUNC on them and return the
13880 result as a tree with type TYPE. The mpfr precision is set to the
13881 precision of TYPE. We assume FUNC returns zero if the result could be
13882 calculated exactly within the requested precision. ARG2 must exceed
13883 *MIN if nonnull (or equal it, if INCLUSIVE is true) for folding. */
13884 static tree
13885 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
13886 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
13887 const REAL_VALUE_TYPE *min, bool inclusive)
13888 {
13889 tree result = NULL_TREE;
13890
13891 STRIP_NOPS (arg1);
13892 STRIP_NOPS (arg2);
13893
13894 /* To proceed, MPFR must exactly represent the target floating point
13895 format, which only happens when the target base equals two. */
13896 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13897 && host_integerp (arg1, 0)
13898 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13899 {
13900 const HOST_WIDE_INT n = tree_low_cst (arg1, 0);
13901 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
13902
13903 if (n == (long) n
13904 && real_isfinite (ra)
13905 && (!min || real_compare (inclusive ? GE_EXPR : GT_EXPR, ra, min)))
13906 {
13907 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13908 const int prec = fmt->p;
13909 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13910 int inexact;
13911 mpfr_t m;
13912
13913 mpfr_init2 (m, prec);
13914 mpfr_from_real (m, ra, GMP_RNDN);
13915 mpfr_clear_flags ();
13916 inexact = func (m, n, m, rnd);
13917 result = do_mpfr_ckconv (m, type, inexact);
13918 mpfr_clear (m);
13919 }
13920 }
13921
13922 return result;
13923 }
13924
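/* Worked example (illustrative): with FUNC == mpfr_jn, folding
   jn (0, 0.0) yields 1.0 exactly, since j0 (0) == 1. The MIN and
   INCLUSIVE parameters let callers restrict the domain; for yn a
   caller would pass a lower bound of zero with INCLUSIVE false,
   because yn is only defined for positive arguments. */
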
13925 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13926 the value pointed to by ARG_QUO and return the remainder. The type
13927 is taken from the type of ARG0 and is used for setting the precision
13928 of the calculation and results. */
13929
13930 static tree
13931 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
13932 {
13933 tree const type = TREE_TYPE (arg0);
13934 tree result = NULL_TREE;
13935
13936 STRIP_NOPS (arg0);
13937 STRIP_NOPS (arg1);
13938
13939 /* To proceed, MPFR must exactly represent the target floating point
13940 format, which only happens when the target base equals two. */
13941 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13942 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
13943 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
13944 {
13945 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
13946 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
13947
13948 if (real_isfinite (ra0) && real_isfinite (ra1))
13949 {
13950 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13951 const int prec = fmt->p;
13952 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13953 tree result_rem;
13954 long integer_quo;
13955 mpfr_t m0, m1;
13956
13957 mpfr_inits2 (prec, m0, m1, NULL);
13958 mpfr_from_real (m0, ra0, GMP_RNDN);
13959 mpfr_from_real (m1, ra1, GMP_RNDN);
13960 mpfr_clear_flags ();
13961 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
13962 /* Remquo is independent of the rounding mode, so pass
13963 inexact=0 to do_mpfr_ckconv(). */
13964 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
13965 mpfr_clears (m0, m1, NULL);
13966 if (result_rem)
13967 {
13968 /* MPFR calculates quo in the host's long so it may
13969 return more bits in quo than the target int can hold
13970 if sizeof(host long) > sizeof(target int). This can
13971 happen even for native compilers in LP64 mode. In
13972 these cases, reduce the quo value modulo the largest
13973 number that the target int can hold while leaving one
13974 bit for the sign. */
13975 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
13976 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
13977
13978 /* Dereference the quo pointer argument. */
13979 arg_quo = build_fold_indirect_ref (arg_quo);
13980 /* Proceed iff a valid pointer type was passed in. */
13981 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
13982 {
13983 /* Set the value. */
13984 tree result_quo
13985 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
13986 build_int_cst (TREE_TYPE (arg_quo),
13987 integer_quo));
13988 TREE_SIDE_EFFECTS (result_quo) = 1;
13989 /* Combine the quo assignment with the rem. */
13990 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13991 result_quo, result_rem));
13992 }
13993 }
13994 }
13995 }
13996 return result;
13997 }
13998
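/* Worked example (illustrative): remquo (5.0, 3.0, &q) folds to a
   COMPOUND_EXPR that stores q = 2 (5/3 rounded to the nearest
   integer) and yields the remainder 5.0 - 2*3.0 == -1.0. */
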
13999 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
14000 resulting value as a tree with type TYPE. The mpfr precision is
14001 set to the precision of TYPE. We assume that this mpfr function
14002 returns zero if the result could be calculated exactly within the
14003 requested precision. In addition, the integer pointer represented
14004 by ARG_SG will be dereferenced and set to the appropriate signgam
14005 (-1 or 1) value. */
14006
14007 static tree
14008 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
14009 {
14010 tree result = NULL_TREE;
14011
14012 STRIP_NOPS (arg);
14013
14014 /* To proceed, MPFR must exactly represent the target floating point
14015 format, which only happens when the target base equals two. Also
14016 verify ARG is a constant and that ARG_SG is an int pointer. */
14017 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
14018 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
14019 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
14020 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
14021 {
14022 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
14023
14024 /* Besides NaN and Inf, the argument cannot be zero or a negative
14025 integer, since lgamma has poles at those points. */
14026 if (real_isfinite (ra)
14027 && ra->cl != rvc_zero
14028 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
14029 {
14030 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
14031 const int prec = fmt->p;
14032 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
14033 int inexact, sg;
14034 mpfr_t m;
14035 tree result_lg;
14036
14037 mpfr_init2 (m, prec);
14038 mpfr_from_real (m, ra, GMP_RNDN);
14039 mpfr_clear_flags ();
14040 inexact = mpfr_lgamma (m, &sg, m, rnd);
14041 result_lg = do_mpfr_ckconv (m, type, inexact);
14042 mpfr_clear (m);
14043 if (result_lg)
14044 {
14045 tree result_sg;
14046
14047 /* Dereference the arg_sg pointer argument. */
14048 arg_sg = build_fold_indirect_ref (arg_sg);
14049 /* Assign the signgam value into *arg_sg. */
14050 result_sg = fold_build2 (MODIFY_EXPR,
14051 TREE_TYPE (arg_sg), arg_sg,
14052 build_int_cst (TREE_TYPE (arg_sg), sg));
14053 TREE_SIDE_EFFECTS (result_sg) = 1;
14054 /* Combine the signgam assignment with the lgamma result. */
14055 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
14056 result_sg, result_lg));
14057 }
14058 }
14059 }
14060
14061 return result;
14062 }
14063
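/* Worked example (illustrative): lgamma_r (2.0, &sg) folds to a
   COMPOUND_EXPR that stores sg = 1 and yields 0.0, since
   gamma (2) == 1 is positive and log (1) == 0 is exact. */
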
14064 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
14065 function FUNC on it and return the resulting value as a tree with
14066 type TYPE. The mpfr precision is set to the precision of TYPE. We
14067 assume that function FUNC returns zero if the result could be
14068 calculated exactly within the requested precision. */
14069
14070 static tree
14071 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
14072 {
14073 tree result = NULL_TREE;
14074
14075 STRIP_NOPS (arg);
14076
14077 /* To proceed, MPFR must exactly represent the target floating point
14078 format, which only happens when the target base equals two. */
14079 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
14080 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
14081 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
14082 {
14083 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
14084 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
14085
14086 if (real_isfinite (re) && real_isfinite (im))
14087 {
14088 const struct real_format *const fmt =
14089 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
14090 const int prec = fmt->p;
14091 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
14092 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
14093 int inexact;
14094 mpc_t m;
14095
14096 mpc_init2 (m, prec);
14097 mpfr_from_real (mpc_realref (m), re, rnd);
14098 mpfr_from_real (mpc_imagref (m), im, rnd);
14099 mpfr_clear_flags ();
14100 inexact = func (m, m, crnd);
14101 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
14102 mpc_clear (m);
14103 }
14104 }
14105
14106 return result;
14107 }
14108
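/* Worked example (illustrative): with FUNC == mpc_sqrt, folding
   csqrt (-4.0 + 0.0i) yields the principal root 0.0 + 2.0i, so the
   call folds to a COMPLEX_CST built from the two parts. */
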
14109 /* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
14110 mpc function FUNC on them and return the resulting value as a tree
14111 with type TYPE. The mpfr precision is set to the precision of
14112 TYPE. We assume that function FUNC returns zero if the result
14113 could be calculated exactly within the requested precision. If
14114 DO_NONFINITE is true, then fold expressions containing Inf or NaN
14115 in the arguments and/or results. */
14116
14117 tree
14118 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
14119 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
14120 {
14121 tree result = NULL_TREE;
14122
14123 STRIP_NOPS (arg0);
14124 STRIP_NOPS (arg1);
14125
14126 /* To proceed, MPFR must exactly represent the target floating point
14127 format, which only happens when the target base equals two. */
14128 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
14129 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
14130 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
14131 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
14132 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
14133 {
14134 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
14135 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
14136 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
14137 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
14138
14139 if (do_nonfinite
14140 || (real_isfinite (re0) && real_isfinite (im0)
14141 && real_isfinite (re1) && real_isfinite (im1)))
14142 {
14143 const struct real_format *const fmt =
14144 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
14145 const int prec = fmt->p;
14146 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
14147 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
14148 int inexact;
14149 mpc_t m0, m1;
14150
14151 mpc_init2 (m0, prec);
14152 mpc_init2 (m1, prec);
14153 mpfr_from_real (mpc_realref (m0), re0, rnd);
14154 mpfr_from_real (mpc_imagref (m0), im0, rnd);
14155 mpfr_from_real (mpc_realref (m1), re1, rnd);
14156 mpfr_from_real (mpc_imagref (m1), im1, rnd);
14157 mpfr_clear_flags ();
14158 inexact = func (m0, m0, m1, crnd);
14159 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
14160 mpc_clear (m0);
14161 mpc_clear (m1);
14162 }
14163 }
14164
14165 return result;
14166 }
14167
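/* Worked example (illustrative): with FUNC == mpc_pow, folding
   cpow (1.0i, 2.0 + 0.0i) produces the correctly rounded value
   -1.0 + 0.0i. This function is exported rather than static because
   constant folding of complex multiplication and division can also
   funnel through it, and such callers may pass DO_NONFINITE so that
   expressions containing Inf or NaN still fold. */
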
14168 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
14169 a normal call should be emitted rather than expanding the function
14170 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
14171
14172 static tree
14173 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
14174 {
14175 int nargs = gimple_call_num_args (stmt);
14176
14177 return fold_builtin_sprintf_chk_1 (gimple_location (stmt), nargs,
14178 (nargs > 0
14179 ? gimple_call_arg_ptr (stmt, 0)
14180 : &error_mark_node), fcode);
14181 }
14182
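/* For example (illustrative): a call such as
   __builtin___sprintf_chk (d, 0, os, "hello"), whose format string
   contains no '%', may be folded by fold_builtin_sprintf_chk_1 into a
   plain strcpy (d, "hello") when the object-size check is known to
   pass. */
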
14183 /* Fold a call STMT to __{,v}snprintf_chk. Return NULL_TREE if
14184 a normal call should be emitted rather than expanding the function
14185 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
14186 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is the maximum
14187 length passed as the second argument. */
14188
14189 tree
14190 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
14191 enum built_in_function fcode)
14192 {
14193 int nargs = gimple_call_num_args (stmt);
14194
14195 return fold_builtin_snprintf_chk_1 (gimple_location (stmt), nargs,
14196 (nargs > 0
14197 ? gimple_call_arg_ptr (stmt, 0)
14198 : &error_mark_node), maxlen, fcode);
14199 }
14200
14201 /* Builtins with folding operations that operate on "..." arguments
14202 need special handling; we need to store the arguments in a convenient
14203 data structure before attempting any folding. Fortunately there are
14204 only a few builtins that fall into this category. FNDECL is the
14205 function, STMT is the GIMPLE_CALL for the call, and IGNORE is true if
14206 the result of the function call is ignored. */
14207
14208 static tree
14209 gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
14210 bool ignore ATTRIBUTE_UNUSED)
14211 {
14212 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
14213 tree ret = NULL_TREE;
14214
14215 switch (fcode)
14216 {
14217 case BUILT_IN_SPRINTF_CHK:
14218 case BUILT_IN_VSPRINTF_CHK:
14219 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
14220 break;
14221
14222 case BUILT_IN_SNPRINTF_CHK:
14223 case BUILT_IN_VSNPRINTF_CHK:
14224 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
14225 break;
14226 default:
14227 break;
14228 }
14229 if (ret)
14230 {
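/* Wrap the folded result in a NOP_EXPR marked TREE_NO_WARNING so that
   no "statement with no effect" style warning is issued for the
   replaced call; see fold_call_stmt below. */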
14231 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
14232 TREE_NO_WARNING (ret) = 1;
14233 return ret;
14234 }
14235 return NULL_TREE;
14236 }
14237
14238 /* A wrapper function for builtin folding that prevents warnings for
14239 "statement without effect" and the like, caused by removing the
14240 call node earlier than the warning is generated. */
14241
14242 tree
14243 fold_call_stmt (gimple stmt, bool ignore)
14244 {
14245 tree ret = NULL_TREE;
14246 tree fndecl = gimple_call_fndecl (stmt);
14247 location_t loc = gimple_location (stmt);
14248 if (fndecl
14249 && TREE_CODE (fndecl) == FUNCTION_DECL
14250 && DECL_BUILT_IN (fndecl)
14251 && !gimple_call_va_arg_pack_p (stmt))
14252 {
14253 int nargs = gimple_call_num_args (stmt);
14254 tree *args = (nargs > 0
14255 ? gimple_call_arg_ptr (stmt, 0)
14256 : &error_mark_node);
14257
14258 if (avoid_folding_inline_builtin (fndecl))
14259 return NULL_TREE;
14260 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
14261 {
14262 return targetm.fold_builtin (fndecl, nargs, args, ignore);
14263 }
14264 else
14265 {
14266 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
14267 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
14268 if (!ret)
14269 ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
14270 if (ret)
14271 {
14272 /* Propagate location information from original call to
14273 expansion of builtin. Otherwise things like
14274 maybe_emit_chk_warning, that operate on the expansion
14275 of a builtin, will use the wrong location information. */
14276 if (gimple_has_location (stmt))
14277 {
14278 tree realret = ret;
14279 if (TREE_CODE (ret) == NOP_EXPR)
14280 realret = TREE_OPERAND (ret, 0);
14281 if (CAN_HAVE_LOCATION_P (realret)
14282 && !EXPR_HAS_LOCATION (realret))
14283 SET_EXPR_LOCATION (realret, loc);
14284 return realret;
14285 }
14286 return ret;
14287 }
14288 }
14289 }
14290 return NULL_TREE;
14291 }
14292
14293 /* Look up the function in builtin_decl that corresponds to DECL
14294 and set ASMSPEC as its user assembler name. DECL must be a
14295 function decl that declares a builtin. */
14296
14297 void
14298 set_builtin_user_assembler_name (tree decl, const char *asmspec)
14299 {
14300 tree builtin;
14301 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
14302 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
14303 && asmspec != 0);
14304
14305 builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
14306 set_user_assembler_name (builtin, asmspec);
14307 switch (DECL_FUNCTION_CODE (decl))
14308 {
14309 case BUILT_IN_MEMCPY:
14310 init_block_move_fn (asmspec);
14311 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
14312 break;
14313 case BUILT_IN_MEMSET:
14314 init_block_clear_fn (asmspec);
14315 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
14316 break;
14317 case BUILT_IN_MEMMOVE:
14318 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
14319 break;
14320 case BUILT_IN_MEMCMP:
14321 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
14322 break;
14323 case BUILT_IN_ABORT:
14324 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
14325 break;
14326 case BUILT_IN_FFS:
14327 if (INT_TYPE_SIZE < BITS_PER_WORD)
14328 {
14329 set_user_assembler_libfunc ("ffs", asmspec);
14330 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
14331 MODE_INT, 0), "ffs");
14332 }
14333 break;
14334 default:
14335 break;
14336 }
14337 }
14338
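/* Example (illustrative): given a user declaration such as

     extern void *memcpy (void *, const void *, __SIZE_TYPE__)
       __asm__ ("my_memcpy");

   both explicit __builtin_memcpy calls and the block moves the
   compiler emits internally will reference my_memcpy, because the
   memcpy libfunc is redirected along with the decl's assembler
   name. */
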
14339 /* Return true if DECL is a builtin that expands to a constant or similarly
14340 simple code. */
14341 bool
14342 is_simple_builtin (tree decl)
14343 {
14344 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
14345 switch (DECL_FUNCTION_CODE (decl))
14346 {
14347 /* Builtins that expand to constants. */
14348 case BUILT_IN_CONSTANT_P:
14349 case BUILT_IN_EXPECT:
14350 case BUILT_IN_OBJECT_SIZE:
14351 case BUILT_IN_UNREACHABLE:
14352 /* Simple register moves or loads from stack. */
14353 case BUILT_IN_ASSUME_ALIGNED:
14354 case BUILT_IN_RETURN_ADDRESS:
14355 case BUILT_IN_EXTRACT_RETURN_ADDR:
14356 case BUILT_IN_FROB_RETURN_ADDR:
14357 case BUILT_IN_RETURN:
14358 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
14359 case BUILT_IN_FRAME_ADDRESS:
14360 case BUILT_IN_VA_END:
14361 case BUILT_IN_STACK_SAVE:
14362 case BUILT_IN_STACK_RESTORE:
14363 /* Exception state returns or moves registers around. */
14364 case BUILT_IN_EH_FILTER:
14365 case BUILT_IN_EH_POINTER:
14366 case BUILT_IN_EH_COPY_VALUES:
14367 return true;
14368
14369 default:
14370 return false;
14371 }
14372
14373 return false;
14374 }
14375
14376 /* Return true if DECL is a builtin that is not expensive, i.e., one
14377 that is most probably expanded inline into reasonably simple code.
14378 This is a superset of is_simple_builtin. */
14379 bool
14380 is_inexpensive_builtin (tree decl)
14381 {
14382 if (!decl)
14383 return false;
14384 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
14385 return true;
14386 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
14387 switch (DECL_FUNCTION_CODE (decl))
14388 {
14389 case BUILT_IN_ABS:
14390 case BUILT_IN_ALLOCA:
14391 case BUILT_IN_ALLOCA_WITH_ALIGN:
14392 case BUILT_IN_BSWAP16:
14393 case BUILT_IN_BSWAP32:
14394 case BUILT_IN_BSWAP64:
14395 case BUILT_IN_CLZ:
14396 case BUILT_IN_CLZIMAX:
14397 case BUILT_IN_CLZL:
14398 case BUILT_IN_CLZLL:
14399 case BUILT_IN_CTZ:
14400 case BUILT_IN_CTZIMAX:
14401 case BUILT_IN_CTZL:
14402 case BUILT_IN_CTZLL:
14403 case BUILT_IN_FFS:
14404 case BUILT_IN_FFSIMAX:
14405 case BUILT_IN_FFSL:
14406 case BUILT_IN_FFSLL:
14407 case BUILT_IN_IMAXABS:
14408 case BUILT_IN_FINITE:
14409 case BUILT_IN_FINITEF:
14410 case BUILT_IN_FINITEL:
14411 case BUILT_IN_FINITED32:
14412 case BUILT_IN_FINITED64:
14413 case BUILT_IN_FINITED128:
14414 case BUILT_IN_FPCLASSIFY:
14415 case BUILT_IN_ISFINITE:
14416 case BUILT_IN_ISINF_SIGN:
14417 case BUILT_IN_ISINF:
14418 case BUILT_IN_ISINFF:
14419 case BUILT_IN_ISINFL:
14420 case BUILT_IN_ISINFD32:
14421 case BUILT_IN_ISINFD64:
14422 case BUILT_IN_ISINFD128:
14423 case BUILT_IN_ISNAN:
14424 case BUILT_IN_ISNANF:
14425 case BUILT_IN_ISNANL:
14426 case BUILT_IN_ISNAND32:
14427 case BUILT_IN_ISNAND64:
14428 case BUILT_IN_ISNAND128:
14429 case BUILT_IN_ISNORMAL:
14430 case BUILT_IN_ISGREATER:
14431 case BUILT_IN_ISGREATEREQUAL:
14432 case BUILT_IN_ISLESS:
14433 case BUILT_IN_ISLESSEQUAL:
14434 case BUILT_IN_ISLESSGREATER:
14435 case BUILT_IN_ISUNORDERED:
14436 case BUILT_IN_VA_ARG_PACK:
14437 case BUILT_IN_VA_ARG_PACK_LEN:
14438 case BUILT_IN_VA_COPY:
14439 case BUILT_IN_TRAP:
14440 case BUILT_IN_SAVEREGS:
14441 case BUILT_IN_POPCOUNTL:
14442 case BUILT_IN_POPCOUNTLL:
14443 case BUILT_IN_POPCOUNTIMAX:
14444 case BUILT_IN_POPCOUNT:
14445 case BUILT_IN_PARITYL:
14446 case BUILT_IN_PARITYLL:
14447 case BUILT_IN_PARITYIMAX:
14448 case BUILT_IN_PARITY:
14449 case BUILT_IN_LABS:
14450 case BUILT_IN_LLABS:
14451 case BUILT_IN_PREFETCH:
14452 return true;
14453
14454 default:
14455 return is_simple_builtin (decl);
14456 }
14457
14458 return false;
14459 }
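
/* For instance (illustrative), __builtin_popcount typically expands to
   a single population-count instruction or a short libcall, so
   heuristics that consult is_inexpensive_builtin may treat such a call
   as roughly as cheap as ordinary arithmetic. */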