/* Expand builtin functions.
   Copyright (C) 1988-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "machmode.h"
#include "rtl.h"
#include "tree.h"
#include "stringpool.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "tree-object-size.h"
#include "realmpfr.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"
#include "predict.h"
#include "tm_p.h"
#include "target.h"
#include "langhooks.h"
#include "tree-ssanames.h"
#include "tree-dfa.h"
#include "value-prof.h"
#include "diagnostic-core.h"
#include "builtins.h"
#include "ubsan.h"
#include "cilk.h"


static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));

struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN

/* Set up an array of _DECL trees, making sure each element is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info;

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;

static const char *c_getstr (tree);
static rtx c_readstr (const char *, enum machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
static rtx result_vector (int, rtx);
#endif
static void expand_builtin_update_setjmp_buf (rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static void expand_errno_check (tree, rtx);
static rtx expand_builtin_mathfn (tree, rtx, rtx);
static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
                                        enum machine_mode, int);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
static rtx expand_builtin_alloca (tree, bool);
static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_expect (location_t, tree, tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree fold_builtin_nan (tree, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static bool integer_valued_real_p (tree);
static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
static bool readonly_data_expr (tree);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_sqrt (location_t, tree, tree);
static tree fold_builtin_cbrt (location_t, tree, tree);
static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
static tree fold_builtin_cos (location_t, tree, tree, tree);
static tree fold_builtin_cosh (location_t, tree, tree, tree);
static tree fold_builtin_tan (tree, tree);
static tree fold_builtin_trunc (location_t, tree, tree);
static tree fold_builtin_floor (location_t, tree, tree);
static tree fold_builtin_ceil (location_t, tree, tree);
static tree fold_builtin_round (location_t, tree, tree);
static tree fold_builtin_int_roundingfn (location_t, tree, tree);
static tree fold_builtin_bitop (tree, tree);
static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree, bool, int);
static tree fold_builtin_strchr (location_t, tree, tree, tree);
static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_strcmp (location_t, tree, tree);
static tree fold_builtin_strncmp (location_t, tree, tree, tree);
static tree fold_builtin_signbit (location_t, tree, tree);
static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree,
                                        enum tree_code, enum tree_code);
static tree fold_builtin_n (location_t, tree, tree *, int, bool);
static tree fold_builtin_0 (location_t, tree, bool);
static tree fold_builtin_1 (location_t, tree, tree, bool);
static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
static tree fold_builtin_varargs (location_t, tree, tree, bool);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strstr (location_t, tree, tree, tree);
static tree fold_builtin_strrchr (location_t, tree, tree, tree);
static tree fold_builtin_strncat (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);
static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
static tree fold_builtin_snprintf (location_t, tree, tree, tree, tree, int);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
                                      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);
static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
                                  enum built_in_function);
static bool init_target_chars (void);

static unsigned HOST_WIDE_INT target_newline;
static unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
static char target_percent_c[3];
static char target_percent_s[3];
static char target_percent_s_newline[4];
static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
                          const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_arg2 (tree, tree, tree,
                          int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_arg3 (tree, tree, tree, tree,
                          int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_sincos (tree, tree, tree);
static tree do_mpfr_bessel_n (tree, tree, tree,
                              int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
                              const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);
/* Return true if NAME starts with __builtin_, __sync_ or __atomic_,
   or, with -fcilkplus, names one of the Cilk runtime helpers checked
   below.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  if (flag_cilkplus
      && (!strcmp (name, "__cilkrts_detach")
          || !strcmp (name, "__cilkrts_pop_frame")))
    return true;
  return false;
}
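
/* For instance, is_builtin_name ("__builtin_memcpy") and
   is_builtin_name ("__atomic_load_n") both return true, while an
   ordinary user symbol such as "my_memcpy" matches none of the
   reserved prefixes and yields false.  */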

/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}

/* By default we assume that c99 functions are present at run time,
   but sincos is not.  */
bool
default_libc_has_function (enum function_class fn_class)
{
  if (fn_class == function_c94
      || fn_class == function_c99_misc
      || fn_class == function_c99_math_complex)
    return true;

  return false;
}

bool
gnu_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED)
{
  return true;
}

bool
no_c99_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED)
{
  return false;
}

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

static bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}
/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and
   N in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
                        unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  enum machine_mode mode;
  int unsignedp, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                             &mode, &unsignedp, &volatilep, true);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
         alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
         allows the low bit to be used as a virtual bit, we know
         that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
        align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
        align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
    {
      align = TYPE_ALIGN (TREE_TYPE (exp));
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
           || TREE_CODE (exp) == MEM_REF
           || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;

      if (TREE_CODE (addr) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
        {
          align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
                   & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
          align *= BITS_PER_UNIT;
          addr = TREE_OPERAND (addr, 0);
        }

      known_alignment
        = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* The alignment of the pointer operand in a TARGET_MEM_REF
         has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
        {
          if (TMR_INDEX (exp))
            {
              unsigned HOST_WIDE_INT step = 1;
              if (TMR_STEP (exp))
                step = TREE_INT_CST_LOW (TMR_STEP (exp));
              align = MIN (align, (step & -step) * BITS_PER_UNIT);
            }
          if (TMR_INDEX2 (exp))
            align = BITS_PER_UNIT;
          known_alignment = false;
        }

      /* When EXP is an actual memory reference then we can use
         TYPE_ALIGN of a pointer indirection to derive alignment.
         Do so only if get_pointer_alignment_1 did not reveal absolute
         alignment knowledge and if using that alignment would
         improve the situation.  */
      if (!addr_p && !known_alignment
          && TYPE_ALIGN (TREE_TYPE (exp)) > align)
        align = TYPE_ALIGN (TREE_TYPE (exp));
      else
        {
          /* Else adjust bitpos accordingly.  */
          bitpos += ptr_bitpos;
          if (TREE_CODE (exp) == MEM_REF
              || TREE_CODE (exp) == TARGET_MEM_REF)
            bitpos += mem_ref_offset (exp).low * BITS_PER_UNIT;
        }
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
         wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
        align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
        {
          unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
          if (inner)
            align = MIN (align, inner);
        }
    }

  *alignp = align;
  *bitposp = bitpos & (*alignp - 1);
  return known_alignment;
}

/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Otherwise return false and store BITS_PER_UNIT to *ALIGNP and any
   bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
                        unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}

/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);
  return align;
}
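
/* Worked example: if get_object_alignment_1 reports align == 32 and
   bitpos == 8, every possible address satisfies addr % 32 == 8 in
   bits, i.e. addr = 32k + 8.  The largest power of two dividing all
   such addresses is bitpos & -bitpos == 8 bits, so only single-byte
   alignment can be guaranteed for the object.  */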

/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Return false if the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
                         unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
                                   alignp, bitposp, true);
  else if (TREE_CODE (exp) == SSA_NAME
           && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
        {
          *bitposp = ptr_misalign * BITS_PER_UNIT;
          *alignp = ptr_align * BITS_PER_UNIT;
          /* We cannot really tell whether this result is an approximation.  */
          return true;
        }
      else
        {
          *bitposp = 0;
          *alignp = BITS_PER_UNIT;
          return false;
        }
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
                  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}
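
/* Example for the INTEGER_CST case: on a hypothetical target with
   BIGGEST_ALIGNMENT == 128, the literal pointer value 12 yields
   *alignp == 128 and *bitposp == (12 * BITS_PER_UNIT) & 127 == 96,
   i.e. the address is 96 bits past the preceding 16-byte boundary.  */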

/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, BITS_PER_UNIT is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);

  return align;
}

/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
        return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  loc = EXPR_LOC_OR_LOC (src, input_location);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
         compute the offset to the following null if we don't know where to
         start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
        if (ptr[i] == 0)
          return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
         has no internal zero bytes.  We can assume that the offset falls
         within the bounds of the string; otherwise, the programmer deserves
         what he gets.  Subtract the offset from the length of the string,
         and return that.  This would perhaps not be valid if we were dealing
         with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! tree_fits_shwi_p (offset_node))
    offset = -1;
  else
    offset = tree_to_shwi (offset_node);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (! TREE_NO_WARNING (src))
        {
          warning_at (loc, 0, "offset outside bounds of constant string");
          TREE_NO_WARNING (src) = 1;
        }
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
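
/* Examples: for "foobar" with a constant offset of 2 the function
   returns ssize_int (4), i.e. strlen ("obar").  For "foo\0bar" with a
   non-constant offset, the embedded zero byte makes the length depend
   on where the search would start, so NULL_TREE is returned and the
   length is computed by a run-time strlen call instead.  */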

/* Return a char pointer for a C string if it is a string constant
   or sum of string constant and integer constant.  */

static const char *
c_getstr (tree src)
{
  tree offset_node;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;

  if (offset_node == 0)
    return TREE_STRING_POINTER (src);
  else if (!tree_fits_uhwi_p (offset_node)
           || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
    return 0;

  return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
}

/* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, enum machine_mode mode)
{
  HOST_WIDE_INT c[2];
  HOST_WIDE_INT ch;
  unsigned int i, j;

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);

  c[0] = 0;
  c[1] = 0;
  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
        j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
          && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
        j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;
      gcc_assert (j < HOST_BITS_PER_DOUBLE_INT);

      if (ch)
        ch = (unsigned char) str[i];
      c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }
  return immed_double_const (c[0], c[1], mode);
}
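
/* Endianness example: on a hypothetical 32-bit little-endian target,
   c_readstr ("abcd", SImode) yields the constant 0x64636261, with 'a'
   in the least significant byte, whereas a big-endian target yields
   0x61626364.  */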

/* Cast a target constant CST to target CHAR and, if that value fits into
   the host char type, return zero and store the value in the variable
   pointed to by P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  val = TREE_INT_CST_LOW (cst);
  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
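
/* Example: with an 8-bit target char and CST == 'A', the value
   survives both masks, so 'A' is stored through P and 0 is returned.
   On a hypothetical target with a 16-bit char, CST == 0x100 passes
   the target mask but is truncated by the host mask, so the two
   values differ and 1 is returned.  */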

/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
          && (TREE_CODE (exp) == PARM_DECL
              || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}

/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;

#ifdef INITIAL_FRAME_ADDRESS_RTX
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
#else
  rtx tem;

  /* For a zero count with __builtin_return_address, we don't care what
     frame address we return, because target-specific definitions will
     override us.  Therefore frame pointer elimination is OK, and using
     the soft frame pointer is OK.

     For a nonzero count, or a zero count with __builtin_frame_address,
     we require a stable offset from the current frame pointer to the
     previous one, so we must use the hard frame pointer, and
     we must disable frame pointer elimination.  */
  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    tem = frame_pointer_rtx;
  else
    {
      tem = hard_frame_pointer_rtx;

      /* Tell reload not to eliminate the frame pointer.  */
      crtl->accesses_prior_frames = 1;
    }
#endif

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the SPARC, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
         frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
#ifdef FRAME_ADDR_RTX
    return FRAME_ADDR_RTX (tem);
#else
    return tem;
#endif

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
                        plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}

/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
                                           GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
                  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
                            plus_constant (Pmode, buf_addr,
                                           2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
#ifdef HAVE_builtin_setjmp_setup
  if (HAVE_builtin_setjmp_setup)
    emit_insn (gen_builtin_setjmp_setup (buf_addr));
#endif

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}
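
/* Layout of the buffer set up above, in Pmode-sized words:
     word 0       frame value (targetm.builtin_setjmp_frame_value)
     word 1       address of RECEIVER_LABEL
     word 2 up    machine-dependent stack save area
   expand_builtin_longjmp below reads the words back from the same
   offsets.  */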

/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    {
      /* First adjust our frame pointer to its actual value.  It was
         previously set to the start of the virtual area corresponding to
         the stacked variables when we branched here and now needs to be
         adjusted to the actual hardware fp value.

         Assignments to virtual registers are converted by
         instantiate_virtual_regs into the corresponding assignment
         to the underlying register (fp in this case) that makes
         the original assignment true.
         So the following insn will actually be decrementing fp by
         STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
         Mark it used (so that the previous assignment remains live once
         the frame pointer is eliminated) and clobbered (to represent the
         implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

#if !HARD_FRAME_POINTER_IS_ARG_POINTER
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
         frame pointer, we don't need to restore it.  We assume here
         that if such an elimination is present, it can always be used.
         This is the case on all known machines; if we don't make this
         assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
        if (elim_regs[i].from == ARG_POINTER_REGNUM
            && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
          break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
        {
          /* Now restore our arg pointer from the address at which it
             was saved in our stack frame.  */
          emit_move_insn (crtl->args.internal_arg_pointer,
                          copy_to_reg (get_arg_pointer_save_area ()));
        }
    }
#endif

#ifdef HAVE_builtin_setjmp_receiver
  if (receiver_label != NULL && HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (receiver_label));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
    if (HAVE_nonlocal_goto_receiver)
      emit_insn (gen_nonlocal_goto_receiver ());
    else
#endif
      { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}

/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack, insn, last;
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded in the
     current function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    emit_insn (gen_builtin_longjmp (buf_addr));
  else
#endif
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
                                               GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
                                                   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
         from expand_goto in stmt.c; see there for detailed comments.  */
#ifdef HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
        /* We have to pass a value to the nonlocal_goto pattern that will
           get copied into the static_chain pointer, but it does not matter
           what that value is, because builtin_setjmp does not use it.  */
        emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
      else
#endif
        {
          lab = copy_to_reg (lab);

          emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
          emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

          emit_move_insn (hard_frame_pointer_rtx, fp);
          emit_stack_restore (SAVE_NONLOCAL, stack);

          emit_use (hard_frame_pointer_rtx);
          emit_use (stack_pointer_rtx);
          emit_indirect_jump (lab);
        }
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
        {
          add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
          break;
        }
      else if (CALL_P (insn))
        break;
    }
}

/* Return true if more arguments remain to be visited by ITER.  */

static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = false;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  do
    {
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
        {
        case 0:
          /* This signifies an ellipsis, any further arguments are all ok.  */
          res = true;
          goto end;
        case VOID_TYPE:
          /* This signifies an endlink, if no arguments remain, return
             true, otherwise return false.  */
          res = !more_const_call_expr_args_p (&iter);
          goto end;
        default:
          /* If no parameters remain or the parameter's code does not
             match the specified code, return false.  Otherwise continue
             checking any remaining arguments.  */
          arg = next_const_call_expr_arg (&iter);
          if (!validate_arg (arg, code))
            goto end;
          break;
        }
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  return res;
}
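
/* Typical uses, taken from the expanders below:
     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
   accepts exactly two pointer arguments, while
     validate_arglist (exp, POINTER_TYPE, 0)
   accepts one pointer followed by any further arguments, as for
   __builtin_prefetch.  */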

/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp, insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
                      plus_constant (Pmode, r_save_area,
                                     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

#ifdef HAVE_nonlocal_goto
  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (HAVE_nonlocal_goto)
    emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
#endif
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
         not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
         conservatively assume that the target function makes use of it.
         The prologue of functions with nonlocal gotos must therefore
         initialize the GP register to the appropriate value, and we
         must then make sure that this value is live at the point
         of the jump.  (Note that this doesn't necessarily apply
         to targets with a nonlocal_goto pattern; they are free
         to implement it in their own way.  Note also that this is
         a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
          && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
        emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
        {
          add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
          break;
        }
      else if (CALL_P (insn))
        break;
    }

  return const0_rtx;
}

/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to correspond to the current
   stack pointer.  */

static void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
                   memory_address
                   (sa_mode,
                    plus_constant (Pmode, buf_addr,
                                   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}

/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
               " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

#ifdef HAVE_prefetch
  if (HAVE_prefetch)
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (CODE_FOR_prefetch, 3, ops))
        return;
    }
#endif

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
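
/* For example, __builtin_prefetch (p, 1, 3) asks to prefetch *p for
   writing with maximal temporal locality: the two constants are
   validated above, and the call expands to the target's prefetch
   pattern when one exists, or to nothing beyond any address side
   effects when it does not.  */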

/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is a SAVE_EXPR that has not been resolved yet, MEM_ATTRS
     can still be derived from its inner expression; for expr->a.b only
     <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
         && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
                     build_array_type (char_type_node,
                                       build_range_type (sizetype,
                                                         size_one_node, len)),
                     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
           && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
                                                     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
                         build_array_type (char_type_node,
                                           build_range_type (sizetype,
                                                             size_zero_node,
                                                             NULL)),
                         exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}

/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
         passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
        size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if (FUNCTION_ARG_REGNO_P (regno))
          {
            mode = targetm.calls.get_raw_arg_mode (regno);

            gcc_assert (mode != VOIDmode);

            align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
            if (size % align != 0)
              size = CEIL (size, align) * align;
            size += GET_MODE_SIZE (mode);
            apply_args_mode[regno] = mode;
          }
        else
          {
            apply_args_mode[regno] = VOIDmode;
          }
    }
  return size;
}
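
/* A sketch of the block laid out above, assuming a hypothetical
   target with 4-byte Pmode, no structure value slot, and two 4-byte
   argument registers r0 and r1:
     bytes 0-3    incoming arg pointer
     bytes 4-7    r0
     bytes 8-11   r1
   apply_args_mode[] remembers the mode saved for each register so
   that expand_builtin_apply can restore it from the same offset.  */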
1411
1412 /* Return the size required for the block returned by __builtin_apply,
1413 and initialize apply_result_mode. */
1414
1415 static int
1416 apply_result_size (void)
1417 {
1418 static int size = -1;
1419 int align, regno;
1420 enum machine_mode mode;
1421
1422 /* The values computed by this function never change. */
1423 if (size < 0)
1424 {
1425 size = 0;
1426
1427 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1428 if (targetm.calls.function_value_regno_p (regno))
1429 {
1430 mode = targetm.calls.get_raw_result_mode (regno);
1431
1432 gcc_assert (mode != VOIDmode);
1433
1434 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1435 if (size % align != 0)
1436 size = CEIL (size, align) * align;
1437 size += GET_MODE_SIZE (mode);
1438 apply_result_mode[regno] = mode;
1439 }
1440 else
1441 apply_result_mode[regno] = VOIDmode;
1442
1443 /* Allow targets that use untyped_call and untyped_return to override
1444 the size so that machine-specific information can be stored here. */
1445 #ifdef APPLY_RESULT_SIZE
1446 size = APPLY_RESULT_SIZE;
1447 #endif
1448 }
1449 return size;
1450 }
1451
1452 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1453 /* Create a vector describing the result block RESULT. If SAVEP is true,
1454 the result block is used to save the values; otherwise it is used to
1455 restore the values. */
1456
1457 static rtx
1458 result_vector (int savep, rtx result)
1459 {
1460 int regno, size, align, nelts;
1461 enum machine_mode mode;
1462 rtx reg, mem;
1463 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1464
1465 size = nelts = 0;
1466 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1467 if ((mode = apply_result_mode[regno]) != VOIDmode)
1468 {
1469 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1470 if (size % align != 0)
1471 size = CEIL (size, align) * align;
1472 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1473 mem = adjust_address (result, mode, size);
1474 savevec[nelts++] = (savep
1475 ? gen_rtx_SET (VOIDmode, mem, reg)
1476 : gen_rtx_SET (VOIDmode, reg, mem));
1477 size += GET_MODE_SIZE (mode);
1478 }
1479 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1480 }
1481 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1482
1483 /* Save the state required to perform an untyped call with the same
1484 arguments as were passed to the current function. */
1485
1486 static rtx
1487 expand_builtin_apply_args_1 (void)
1488 {
1489 rtx registers, tem;
1490 int size, align, regno;
1491 enum machine_mode mode;
1492 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1493
1494 /* Create a block where the arg-pointer, structure value address,
1495 and argument registers can be saved. */
1496 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1497
1498 /* Walk past the arg-pointer and structure value address. */
1499 size = GET_MODE_SIZE (Pmode);
1500 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1501 size += GET_MODE_SIZE (Pmode);
1502
1503 /* Save each register used in calling a function to the block. */
1504 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1505 if ((mode = apply_args_mode[regno]) != VOIDmode)
1506 {
1507 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1508 if (size % align != 0)
1509 size = CEIL (size, align) * align;
1510
1511 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1512
1513 emit_move_insn (adjust_address (registers, mode, size), tem);
1514 size += GET_MODE_SIZE (mode);
1515 }
1516
1517 /* Save the arg pointer to the block. */
1518 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1519 #ifdef STACK_GROWS_DOWNWARD
1520 /* We need the pointer as the caller actually passed them to us, not
1521 as we might have pretended they were passed. Make sure it's a valid
1522 operand, as emit_move_insn isn't expected to handle a PLUS. */
1523 tem
1524 = force_operand (plus_constant (Pmode, tem, crtl->args.pretend_args_size),
1525 NULL_RTX);
1526 #endif
1527 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1528
1529 size = GET_MODE_SIZE (Pmode);
1530
1531 /* Save the structure value address unless this is passed as an
1532 "invisible" first argument. */
1533 if (struct_incoming_value)
1534 {
1535 emit_move_insn (adjust_address (registers, Pmode, size),
1536 copy_to_reg (struct_incoming_value));
1537 size += GET_MODE_SIZE (Pmode);
1538 }
1539
1540 /* Return the address of the block. */
1541 return copy_addr_to_reg (XEXP (registers, 0));
1542 }
1543
1544 /* __builtin_apply_args returns block of memory allocated on
1545 the stack into which is stored the arg pointer, structure
1546 value address, static chain, and all the registers that might
1547 possibly be used in performing a function call. The code is
1548 moved to the start of the function so the incoming values are
1549 saved. */
1550
1551 static rtx
1552 expand_builtin_apply_args (void)
1553 {
1554 /* Don't do __builtin_apply_args more than once in a function.
1555 Save the result of the first call and reuse it. */
1556 if (apply_args_value != 0)
1557 return apply_args_value;
1558 {
1559 /* When this function is called, it means that registers must be
1560 saved on entry to this function. So we migrate the
1561 call to the first insn of this function. */
1562 rtx temp;
1563 rtx seq;
1564
1565 start_sequence ();
1566 temp = expand_builtin_apply_args_1 ();
1567 seq = get_insns ();
1568 end_sequence ();
1569
1570 apply_args_value = temp;
1571
1572 /* Put the insns after the NOTE that starts the function.
1573 If this is inside a start_sequence, make the outer-level insn
1574 chain current, so the code is placed at the start of the
1575 function. If internal_arg_pointer is a non-virtual pseudo,
1576 it needs to be placed after the function that initializes
1577 that pseudo. */
1578 push_topmost_sequence ();
1579 if (REG_P (crtl->args.internal_arg_pointer)
1580 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1581 emit_insn_before (seq, parm_birth_insn);
1582 else
1583 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1584 pop_topmost_sequence ();
1585 return temp;
1586 }
1587 }
1588
1589 /* Perform an untyped call and save the state required to perform an
1590 untyped return of whatever value was returned by the given function. */
1591
1592 static rtx
1593 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1594 {
1595 int size, align, regno;
1596 enum machine_mode mode;
1597 rtx incoming_args, result, reg, dest, src, call_insn;
1598 rtx old_stack_level = 0;
1599 rtx call_fusage = 0;
1600 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1601
1602 arguments = convert_memory_address (Pmode, arguments);
1603
1604 /* Create a block where the return registers can be saved. */
1605 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1606
1607 /* Fetch the arg pointer from the ARGUMENTS block. */
1608 incoming_args = gen_reg_rtx (Pmode);
1609 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1610 #ifndef STACK_GROWS_DOWNWARD
1611 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1612 incoming_args, 0, OPTAB_LIB_WIDEN);
1613 #endif
1614
1615 /* Push a new argument block and copy the arguments. Do not allow
1616 the (potential) memcpy call below to interfere with our stack
1617 manipulations. */
1618 do_pending_stack_adjust ();
1619 NO_DEFER_POP;
1620
1621 /* Save the stack with nonlocal if available. */
1622 #ifdef HAVE_save_stack_nonlocal
1623 if (HAVE_save_stack_nonlocal)
1624 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1625 else
1626 #endif
1627 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1628
1629 /* Allocate a block of memory onto the stack and copy the memory
1630 arguments to the outgoing arguments address. We can pass TRUE
1631 as the 4th argument because we just saved the stack pointer
1632 and will restore it right after the call. */
1633 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1634
1635 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1636 may have already set current_function_calls_alloca to true.
1637 current_function_calls_alloca won't be set if argsize is zero,
1638 so we have to guarantee need_drap is true here. */
1639 if (SUPPORTS_STACK_ALIGNMENT)
1640 crtl->need_drap = true;
1641
1642 dest = virtual_outgoing_args_rtx;
1643 #ifndef STACK_GROWS_DOWNWARD
1644 if (CONST_INT_P (argsize))
1645 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1646 else
1647 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1648 #endif
1649 dest = gen_rtx_MEM (BLKmode, dest);
1650 set_mem_align (dest, PARM_BOUNDARY);
1651 src = gen_rtx_MEM (BLKmode, incoming_args);
1652 set_mem_align (src, PARM_BOUNDARY);
1653 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1654
1655 /* Refer to the argument block. */
1656 apply_args_size ();
1657 arguments = gen_rtx_MEM (BLKmode, arguments);
1658 set_mem_align (arguments, PARM_BOUNDARY);
1659
1660 /* Walk past the arg-pointer and structure value address. */
1661 size = GET_MODE_SIZE (Pmode);
1662 if (struct_value)
1663 size += GET_MODE_SIZE (Pmode);
1664
1665 /* Restore each of the registers previously saved. Make USE insns
1666 for each of these registers for use in making the call. */
1667 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1668 if ((mode = apply_args_mode[regno]) != VOIDmode)
1669 {
1670 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1671 if (size % align != 0)
1672 size = CEIL (size, align) * align;
1673 reg = gen_rtx_REG (mode, regno);
1674 emit_move_insn (reg, adjust_address (arguments, mode, size));
1675 use_reg (&call_fusage, reg);
1676 size += GET_MODE_SIZE (mode);
1677 }
1678
1679 /* Restore the structure value address unless this is passed as an
1680 "invisible" first argument. */
1681 size = GET_MODE_SIZE (Pmode);
1682 if (struct_value)
1683 {
1684 rtx value = gen_reg_rtx (Pmode);
1685 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1686 emit_move_insn (struct_value, value);
1687 if (REG_P (struct_value))
1688 use_reg (&call_fusage, struct_value);
1689 size += GET_MODE_SIZE (Pmode);
1690 }
1691
1692 /* All arguments and registers used for the call are set up by now! */
1693 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1694
1695 /* Ensure the address is valid. A SYMBOL_REF is already valid, so no work
1696 is needed, and we don't want to load it into a register as an optimization,
1697 because prepare_call_address already did that if it should be done. */
1698 if (GET_CODE (function) != SYMBOL_REF)
1699 function = memory_address (FUNCTION_MODE, function);
1700
1701 /* Generate the actual call instruction and save the return value. */
1702 #ifdef HAVE_untyped_call
1703 if (HAVE_untyped_call)
1704 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1705 result, result_vector (1, result)));
1706 else
1707 #endif
1708 #ifdef HAVE_call_value
1709 if (HAVE_call_value)
1710 {
1711 rtx valreg = 0;
1712
1713 /* Locate the unique return register. It is not possible to
1714 express a call that sets more than one return register using
1715 call_value; use untyped_call for that. In fact, untyped_call
1716 only needs to save the return registers in the given block. */
1717 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1718 if ((mode = apply_result_mode[regno]) != VOIDmode)
1719 {
1720 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1721
1722 valreg = gen_rtx_REG (mode, regno);
1723 }
1724
1725 emit_call_insn (GEN_CALL_VALUE (valreg,
1726 gen_rtx_MEM (FUNCTION_MODE, function),
1727 const0_rtx, NULL_RTX, const0_rtx));
1728
1729 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1730 }
1731 else
1732 #endif
1733 gcc_unreachable ();
1734
1735 /* Find the CALL insn we just emitted, and attach the register usage
1736 information. */
1737 call_insn = last_call_insn ();
1738 add_function_usage_to (call_insn, call_fusage);
1739
1740 /* Restore the stack. */
1741 #ifdef HAVE_save_stack_nonlocal
1742 if (HAVE_save_stack_nonlocal)
1743 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1744 else
1745 #endif
1746 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1747 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1748
1749 OK_DEFER_POP;
1750
1751 /* Return the address of the result block. */
1752 result = copy_addr_to_reg (XEXP (result, 0));
1753 return convert_memory_address (ptr_mode, result);
1754 }
1755
1756 /* Perform an untyped return. */
1757
1758 static void
1759 expand_builtin_return (rtx result)
1760 {
1761 int size, align, regno;
1762 enum machine_mode mode;
1763 rtx reg;
1764 rtx call_fusage = 0;
1765
1766 result = convert_memory_address (Pmode, result);
1767
1768 apply_result_size ();
1769 result = gen_rtx_MEM (BLKmode, result);
1770
1771 #ifdef HAVE_untyped_return
1772 if (HAVE_untyped_return)
1773 {
1774 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1775 emit_barrier ();
1776 return;
1777 }
1778 #endif
1779
1780 /* Restore the return value and note that each value is used. */
1781 size = 0;
1782 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1783 if ((mode = apply_result_mode[regno]) != VOIDmode)
1784 {
1785 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1786 if (size % align != 0)
1787 size = CEIL (size, align) * align;
1788 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1789 emit_move_insn (reg, adjust_address (result, mode, size));
1790
1791 push_to_sequence (call_fusage);
1792 emit_use (reg);
1793 call_fusage = get_insns ();
1794 end_sequence ();
1795 size += GET_MODE_SIZE (mode);
1796 }
1797
1798 /* Put the USE insns before the return. */
1799 emit_insn (call_fusage);
1800
1801 /* Return whatever values were restored by jumping directly to the end
1802 of the function. */
1803 expand_naked_return ();
1804 }
1805
1806 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1807
1808 static enum type_class
1809 type_to_class (tree type)
1810 {
1811 switch (TREE_CODE (type))
1812 {
1813 case VOID_TYPE: return void_type_class;
1814 case INTEGER_TYPE: return integer_type_class;
1815 case ENUMERAL_TYPE: return enumeral_type_class;
1816 case BOOLEAN_TYPE: return boolean_type_class;
1817 case POINTER_TYPE: return pointer_type_class;
1818 case REFERENCE_TYPE: return reference_type_class;
1819 case OFFSET_TYPE: return offset_type_class;
1820 case REAL_TYPE: return real_type_class;
1821 case COMPLEX_TYPE: return complex_type_class;
1822 case FUNCTION_TYPE: return function_type_class;
1823 case METHOD_TYPE: return method_type_class;
1824 case RECORD_TYPE: return record_type_class;
1825 case UNION_TYPE:
1826 case QUAL_UNION_TYPE: return union_type_class;
1827 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1828 ? string_type_class : array_type_class);
1829 case LANG_TYPE: return lang_type_class;
1830 default: return no_type_class;
1831 }
1832 }
1833
1834 /* Expand a call EXP to __builtin_classify_type. */
1835
1836 static rtx
1837 expand_builtin_classify_type (tree exp)
1838 {
1839 if (call_expr_nargs (exp))
1840 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1841 return GEN_INT (no_type_class);
1842 }
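/* For example, __builtin_classify_type (1.0) evaluates to
real_type_class, while __builtin_classify_type ("") evaluates to
pointer_type_class, since the array argument decays to a pointer
before the argument's type is inspected above. */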
1843
1844 /* This helper macro, meant to be used in mathfn_built_in_1 below,
1845 determines which among a set of three builtin math functions is
1846 appropriate for a given type mode. The `F' and `L' cases are
1847 automatically generated from the `double' case. */
1848 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1849 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1850 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1851 fcodel = BUILT_IN_MATHFN##L ; break;
1852 /* Similar to above, but appends _R after any F/L suffix. */
1853 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1854 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1855 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1856 fcodel = BUILT_IN_MATHFN##L_R ; break;
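/* For instance, CASE_MATHFN (BUILT_IN_SIN) expands to:

case BUILT_IN_SIN: case BUILT_IN_SINF: case BUILT_IN_SINL:
fcode = BUILT_IN_SIN; fcodef = BUILT_IN_SINF;
fcodel = BUILT_IN_SINL; break; */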
1857
1858 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
1859 if available. If IMPLICIT is true use the implicit builtin declaration,
1860 otherwise use the explicit declaration. If we can't do the conversion,
1861 return zero. */
1862
1863 static tree
1864 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
1865 {
1866 enum built_in_function fcode, fcodef, fcodel, fcode2;
1867
1868 switch (fn)
1869 {
1870 CASE_MATHFN (BUILT_IN_ACOS)
1871 CASE_MATHFN (BUILT_IN_ACOSH)
1872 CASE_MATHFN (BUILT_IN_ASIN)
1873 CASE_MATHFN (BUILT_IN_ASINH)
1874 CASE_MATHFN (BUILT_IN_ATAN)
1875 CASE_MATHFN (BUILT_IN_ATAN2)
1876 CASE_MATHFN (BUILT_IN_ATANH)
1877 CASE_MATHFN (BUILT_IN_CBRT)
1878 CASE_MATHFN (BUILT_IN_CEIL)
1879 CASE_MATHFN (BUILT_IN_CEXPI)
1880 CASE_MATHFN (BUILT_IN_COPYSIGN)
1881 CASE_MATHFN (BUILT_IN_COS)
1882 CASE_MATHFN (BUILT_IN_COSH)
1883 CASE_MATHFN (BUILT_IN_DREM)
1884 CASE_MATHFN (BUILT_IN_ERF)
1885 CASE_MATHFN (BUILT_IN_ERFC)
1886 CASE_MATHFN (BUILT_IN_EXP)
1887 CASE_MATHFN (BUILT_IN_EXP10)
1888 CASE_MATHFN (BUILT_IN_EXP2)
1889 CASE_MATHFN (BUILT_IN_EXPM1)
1890 CASE_MATHFN (BUILT_IN_FABS)
1891 CASE_MATHFN (BUILT_IN_FDIM)
1892 CASE_MATHFN (BUILT_IN_FLOOR)
1893 CASE_MATHFN (BUILT_IN_FMA)
1894 CASE_MATHFN (BUILT_IN_FMAX)
1895 CASE_MATHFN (BUILT_IN_FMIN)
1896 CASE_MATHFN (BUILT_IN_FMOD)
1897 CASE_MATHFN (BUILT_IN_FREXP)
1898 CASE_MATHFN (BUILT_IN_GAMMA)
1899 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1900 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1901 CASE_MATHFN (BUILT_IN_HYPOT)
1902 CASE_MATHFN (BUILT_IN_ILOGB)
1903 CASE_MATHFN (BUILT_IN_ICEIL)
1904 CASE_MATHFN (BUILT_IN_IFLOOR)
1905 CASE_MATHFN (BUILT_IN_INF)
1906 CASE_MATHFN (BUILT_IN_IRINT)
1907 CASE_MATHFN (BUILT_IN_IROUND)
1908 CASE_MATHFN (BUILT_IN_ISINF)
1909 CASE_MATHFN (BUILT_IN_J0)
1910 CASE_MATHFN (BUILT_IN_J1)
1911 CASE_MATHFN (BUILT_IN_JN)
1912 CASE_MATHFN (BUILT_IN_LCEIL)
1913 CASE_MATHFN (BUILT_IN_LDEXP)
1914 CASE_MATHFN (BUILT_IN_LFLOOR)
1915 CASE_MATHFN (BUILT_IN_LGAMMA)
1916 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1917 CASE_MATHFN (BUILT_IN_LLCEIL)
1918 CASE_MATHFN (BUILT_IN_LLFLOOR)
1919 CASE_MATHFN (BUILT_IN_LLRINT)
1920 CASE_MATHFN (BUILT_IN_LLROUND)
1921 CASE_MATHFN (BUILT_IN_LOG)
1922 CASE_MATHFN (BUILT_IN_LOG10)
1923 CASE_MATHFN (BUILT_IN_LOG1P)
1924 CASE_MATHFN (BUILT_IN_LOG2)
1925 CASE_MATHFN (BUILT_IN_LOGB)
1926 CASE_MATHFN (BUILT_IN_LRINT)
1927 CASE_MATHFN (BUILT_IN_LROUND)
1928 CASE_MATHFN (BUILT_IN_MODF)
1929 CASE_MATHFN (BUILT_IN_NAN)
1930 CASE_MATHFN (BUILT_IN_NANS)
1931 CASE_MATHFN (BUILT_IN_NEARBYINT)
1932 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1933 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1934 CASE_MATHFN (BUILT_IN_POW)
1935 CASE_MATHFN (BUILT_IN_POWI)
1936 CASE_MATHFN (BUILT_IN_POW10)
1937 CASE_MATHFN (BUILT_IN_REMAINDER)
1938 CASE_MATHFN (BUILT_IN_REMQUO)
1939 CASE_MATHFN (BUILT_IN_RINT)
1940 CASE_MATHFN (BUILT_IN_ROUND)
1941 CASE_MATHFN (BUILT_IN_SCALB)
1942 CASE_MATHFN (BUILT_IN_SCALBLN)
1943 CASE_MATHFN (BUILT_IN_SCALBN)
1944 CASE_MATHFN (BUILT_IN_SIGNBIT)
1945 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1946 CASE_MATHFN (BUILT_IN_SIN)
1947 CASE_MATHFN (BUILT_IN_SINCOS)
1948 CASE_MATHFN (BUILT_IN_SINH)
1949 CASE_MATHFN (BUILT_IN_SQRT)
1950 CASE_MATHFN (BUILT_IN_TAN)
1951 CASE_MATHFN (BUILT_IN_TANH)
1952 CASE_MATHFN (BUILT_IN_TGAMMA)
1953 CASE_MATHFN (BUILT_IN_TRUNC)
1954 CASE_MATHFN (BUILT_IN_Y0)
1955 CASE_MATHFN (BUILT_IN_Y1)
1956 CASE_MATHFN (BUILT_IN_YN)
1957
1958 default:
1959 return NULL_TREE;
1960 }
1961
1962 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1963 fcode2 = fcode;
1964 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1965 fcode2 = fcodef;
1966 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1967 fcode2 = fcodel;
1968 else
1969 return NULL_TREE;
1970
1971 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1972 return NULL_TREE;
1973
1974 return builtin_decl_explicit (fcode2);
1975 }
1976
1977 /* Like mathfn_built_in_1(), but always use the implicit builtin declarations. */
1978
1979 tree
1980 mathfn_built_in (tree type, enum built_in_function fn)
1981 {
1982 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1983 }
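/* E.g. mathfn_built_in (float_type_node, BUILT_IN_SIN) yields the decl
for sinf, and mathfn_built_in (long_double_type_node, BUILT_IN_SIN)
the decl for sinl, provided the implicit declarations are available. */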
1984
1985 /* If errno must be maintained, expand the RTL to check if the result,
1986 TARGET, of a built-in function call, EXP, is NaN, and if so set
1987 errno to EDOM. */
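/* The check relies on NaN being the only value that compares unequal
to itself; in outline the expansion below produces

result = insn (arg);
if (result != result)
  errno = EDOM;

with the branch predicted heavily towards the non-NaN path, falling
back to re-issuing the library call when EDOM cannot be stored
directly. */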
1988
1989 static void
1990 expand_errno_check (tree exp, rtx target)
1991 {
1992 rtx lab = gen_label_rtx ();
1993
1994 /* Test the result; if it is NaN, set errno=EDOM because
1995 the argument was not in the domain. */
1996 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1997 NULL_RTX, NULL_RTX, lab,
1998 /* The jump is very likely. */
1999 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
2000
2001 #ifdef TARGET_EDOM
2002 /* If this built-in doesn't throw an exception, set errno directly. */
2003 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
2004 {
2005 #ifdef GEN_ERRNO_RTX
2006 rtx errno_rtx = GEN_ERRNO_RTX;
2007 #else
2008 rtx errno_rtx
2009 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
2010 #endif
2011 emit_move_insn (errno_rtx,
2012 gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
2013 emit_label (lab);
2014 return;
2015 }
2016 #endif
2017
2018 /* Make sure the library call isn't expanded as a tail call. */
2019 CALL_EXPR_TAILCALL (exp) = 0;
2020
2021 /* We can't set errno=EDOM directly; let the library call do it.
2022 Pop the arguments right away in case the call gets deleted. */
2023 NO_DEFER_POP;
2024 expand_call (exp, target, 0);
2025 OK_DEFER_POP;
2026 emit_label (lab);
2027 }
2028
2029 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
2030 Return NULL_RTX if a normal call should be emitted rather than expanding
2031 the function in-line. EXP is the expression that is a call to the builtin
2032 function; if convenient, the result should be placed in TARGET.
2033 SUBTARGET may be used as the target for computing one of EXP's operands. */
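/* Roughly, for y = sqrt (x) this emits

y = sqrt_insn (x);
if (y != y)
  sqrt_libcall (x);

where sqrt_insn stands for the target's sqrt pattern and sqrt_libcall
for an ordinary errno-setting call; the second arm is emitted only when
flag_errno_math requires it, and a plain call is used when no insn
pattern exists at all. */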
2034
2035 static rtx
2036 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
2037 {
2038 optab builtin_optab;
2039 rtx op0, insns;
2040 tree fndecl = get_callee_fndecl (exp);
2041 enum machine_mode mode;
2042 bool errno_set = false;
2043 bool try_widening = false;
2044 tree arg;
2045
2046 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2047 return NULL_RTX;
2048
2049 arg = CALL_EXPR_ARG (exp, 0);
2050
2051 switch (DECL_FUNCTION_CODE (fndecl))
2052 {
2053 CASE_FLT_FN (BUILT_IN_SQRT):
2054 errno_set = ! tree_expr_nonnegative_p (arg);
2055 try_widening = true;
2056 builtin_optab = sqrt_optab;
2057 break;
2058 CASE_FLT_FN (BUILT_IN_EXP):
2059 errno_set = true; builtin_optab = exp_optab; break;
2060 CASE_FLT_FN (BUILT_IN_EXP10):
2061 CASE_FLT_FN (BUILT_IN_POW10):
2062 errno_set = true; builtin_optab = exp10_optab; break;
2063 CASE_FLT_FN (BUILT_IN_EXP2):
2064 errno_set = true; builtin_optab = exp2_optab; break;
2065 CASE_FLT_FN (BUILT_IN_EXPM1):
2066 errno_set = true; builtin_optab = expm1_optab; break;
2067 CASE_FLT_FN (BUILT_IN_LOGB):
2068 errno_set = true; builtin_optab = logb_optab; break;
2069 CASE_FLT_FN (BUILT_IN_LOG):
2070 errno_set = true; builtin_optab = log_optab; break;
2071 CASE_FLT_FN (BUILT_IN_LOG10):
2072 errno_set = true; builtin_optab = log10_optab; break;
2073 CASE_FLT_FN (BUILT_IN_LOG2):
2074 errno_set = true; builtin_optab = log2_optab; break;
2075 CASE_FLT_FN (BUILT_IN_LOG1P):
2076 errno_set = true; builtin_optab = log1p_optab; break;
2077 CASE_FLT_FN (BUILT_IN_ASIN):
2078 builtin_optab = asin_optab; break;
2079 CASE_FLT_FN (BUILT_IN_ACOS):
2080 builtin_optab = acos_optab; break;
2081 CASE_FLT_FN (BUILT_IN_TAN):
2082 builtin_optab = tan_optab; break;
2083 CASE_FLT_FN (BUILT_IN_ATAN):
2084 builtin_optab = atan_optab; break;
2085 CASE_FLT_FN (BUILT_IN_FLOOR):
2086 builtin_optab = floor_optab; break;
2087 CASE_FLT_FN (BUILT_IN_CEIL):
2088 builtin_optab = ceil_optab; break;
2089 CASE_FLT_FN (BUILT_IN_TRUNC):
2090 builtin_optab = btrunc_optab; break;
2091 CASE_FLT_FN (BUILT_IN_ROUND):
2092 builtin_optab = round_optab; break;
2093 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2094 builtin_optab = nearbyint_optab;
2095 if (flag_trapping_math)
2096 break;
2097 /* Else fallthrough and expand as rint. */
2098 CASE_FLT_FN (BUILT_IN_RINT):
2099 builtin_optab = rint_optab; break;
2100 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2101 builtin_optab = significand_optab; break;
2102 default:
2103 gcc_unreachable ();
2104 }
2105
2106 /* Make a suitable register to place result in. */
2107 mode = TYPE_MODE (TREE_TYPE (exp));
2108
2109 if (! flag_errno_math || ! HONOR_NANS (mode))
2110 errno_set = false;
2111
2112 /* Before working hard, check whether the instruction is available, but try
2113 to widen the mode for specific operations. */
2114 if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2115 || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
2116 && (!errno_set || !optimize_insn_for_size_p ()))
2117 {
2118 rtx result = gen_reg_rtx (mode);
2119
2120 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2121 need to expand the argument again. This way, we will not perform
2122 side-effects more than once. */
2123 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2124
2125 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2126
2127 start_sequence ();
2128
2129 /* Compute into RESULT.
2130 Set RESULT to wherever the result comes back. */
2131 result = expand_unop (mode, builtin_optab, op0, result, 0);
2132
2133 if (result != 0)
2134 {
2135 if (errno_set)
2136 expand_errno_check (exp, result);
2137
2138 /* Output the entire sequence. */
2139 insns = get_insns ();
2140 end_sequence ();
2141 emit_insn (insns);
2142 return result;
2143 }
2144
2145 /* If we were unable to expand via the builtin, stop the sequence
2146 (without outputting the insns) and call the library function
2147 with the stabilized argument list. */
2148 end_sequence ();
2149 }
2150
2151 return expand_call (exp, target, target == const0_rtx);
2152 }
2153
2154 /* Expand a call to the builtin binary math functions (pow and atan2).
2155 Return NULL_RTX if a normal call should be emitted rather than expanding the
2156 function in-line. EXP is the expression that is a call to the builtin
2157 function; if convenient, the result should be placed in TARGET.
2158 SUBTARGET may be used as the target for computing one of EXP's
2159 operands. */
2160
2161 static rtx
2162 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2163 {
2164 optab builtin_optab;
2165 rtx op0, op1, insns, result;
2166 int op1_type = REAL_TYPE;
2167 tree fndecl = get_callee_fndecl (exp);
2168 tree arg0, arg1;
2169 enum machine_mode mode;
2170 bool errno_set = true;
2171
2172 switch (DECL_FUNCTION_CODE (fndecl))
2173 {
2174 CASE_FLT_FN (BUILT_IN_SCALBN):
2175 CASE_FLT_FN (BUILT_IN_SCALBLN):
2176 CASE_FLT_FN (BUILT_IN_LDEXP):
2177 op1_type = INTEGER_TYPE;
2178 default:
2179 break;
2180 }
2181
2182 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2183 return NULL_RTX;
2184
2185 arg0 = CALL_EXPR_ARG (exp, 0);
2186 arg1 = CALL_EXPR_ARG (exp, 1);
2187
2188 switch (DECL_FUNCTION_CODE (fndecl))
2189 {
2190 CASE_FLT_FN (BUILT_IN_POW):
2191 builtin_optab = pow_optab; break;
2192 CASE_FLT_FN (BUILT_IN_ATAN2):
2193 builtin_optab = atan2_optab; break;
2194 CASE_FLT_FN (BUILT_IN_SCALB):
2195 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2196 return 0;
2197 builtin_optab = scalb_optab; break;
2198 CASE_FLT_FN (BUILT_IN_SCALBN):
2199 CASE_FLT_FN (BUILT_IN_SCALBLN):
2200 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2201 return 0;
2202 /* Fall through... */
2203 CASE_FLT_FN (BUILT_IN_LDEXP):
2204 builtin_optab = ldexp_optab; break;
2205 CASE_FLT_FN (BUILT_IN_FMOD):
2206 builtin_optab = fmod_optab; break;
2207 CASE_FLT_FN (BUILT_IN_REMAINDER):
2208 CASE_FLT_FN (BUILT_IN_DREM):
2209 builtin_optab = remainder_optab; break;
2210 default:
2211 gcc_unreachable ();
2212 }
2213
2214 /* Make a suitable register to place result in. */
2215 mode = TYPE_MODE (TREE_TYPE (exp));
2216
2217 /* Before working hard, check whether the instruction is available. */
2218 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2219 return NULL_RTX;
2220
2221 result = gen_reg_rtx (mode);
2222
2223 if (! flag_errno_math || ! HONOR_NANS (mode))
2224 errno_set = false;
2225
2226 if (errno_set && optimize_insn_for_size_p ())
2227 return 0;
2228
2229 /* Always stabilize the argument list. */
2230 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2231 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2232
2233 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2234 op1 = expand_normal (arg1);
2235
2236 start_sequence ();
2237
2238 /* Compute into RESULT.
2239 Set RESULT to wherever the result comes back. */
2240 result = expand_binop (mode, builtin_optab, op0, op1,
2241 result, 0, OPTAB_DIRECT);
2242
2243 /* If we were unable to expand via the builtin, stop the sequence
2244 (without outputting the insns) and call the library function
2245 with the stabilized argument list. */
2246 if (result == 0)
2247 {
2248 end_sequence ();
2249 return expand_call (exp, target, target == const0_rtx);
2250 }
2251
2252 if (errno_set)
2253 expand_errno_check (exp, result);
2254
2255 /* Output the entire sequence. */
2256 insns = get_insns ();
2257 end_sequence ();
2258 emit_insn (insns);
2259
2260 return result;
2261 }
2262
2263 /* Expand a call to the builtin trinary math functions (fma).
2264 Return NULL_RTX if a normal call should be emitted rather than expanding the
2265 function in-line. EXP is the expression that is a call to the builtin
2266 function; if convenient, the result should be placed in TARGET.
2267 SUBTARGET may be used as the target for computing one of EXP's
2268 operands. */
2269
2270 static rtx
2271 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2272 {
2273 optab builtin_optab;
2274 rtx op0, op1, op2, insns, result;
2275 tree fndecl = get_callee_fndecl (exp);
2276 tree arg0, arg1, arg2;
2277 enum machine_mode mode;
2278
2279 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2280 return NULL_RTX;
2281
2282 arg0 = CALL_EXPR_ARG (exp, 0);
2283 arg1 = CALL_EXPR_ARG (exp, 1);
2284 arg2 = CALL_EXPR_ARG (exp, 2);
2285
2286 switch (DECL_FUNCTION_CODE (fndecl))
2287 {
2288 CASE_FLT_FN (BUILT_IN_FMA):
2289 builtin_optab = fma_optab; break;
2290 default:
2291 gcc_unreachable ();
2292 }
2293
2294 /* Make a suitable register to place result in. */
2295 mode = TYPE_MODE (TREE_TYPE (exp));
2296
2297 /* Before working hard, check whether the instruction is available. */
2298 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2299 return NULL_RTX;
2300
2301 result = gen_reg_rtx (mode);
2302
2303 /* Always stabilize the argument list. */
2304 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2305 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2306 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2307
2308 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2309 op1 = expand_normal (arg1);
2310 op2 = expand_normal (arg2);
2311
2312 start_sequence ();
2313
2314 /* Compute into RESULT.
2315 Set RESULT to wherever the result comes back. */
2316 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2317 result, 0);
2318
2319 /* If we were unable to expand via the builtin, stop the sequence
2320 (without outputting the insns) and call the library function
2321 with the stabilized argument list. */
2322 if (result == 0)
2323 {
2324 end_sequence ();
2325 return expand_call (exp, target, target == const0_rtx);
2326 }
2327
2328 /* Output the entire sequence. */
2329 insns = get_insns ();
2330 end_sequence ();
2331 emit_insn (insns);
2332
2333 return result;
2334 }
2335
2336 /* Expand a call to the builtin sin and cos math functions.
2337 Return NULL_RTX if a normal call should be emitted rather than expanding the
2338 function in-line. EXP is the expression that is a call to the builtin
2339 function; if convenient, the result should be placed in TARGET.
2340 SUBTARGET may be used as the target for computing one of EXP's
2341 operands. */
2342
2343 static rtx
2344 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2345 {
2346 optab builtin_optab;
2347 rtx op0, insns;
2348 tree fndecl = get_callee_fndecl (exp);
2349 enum machine_mode mode;
2350 tree arg;
2351
2352 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2353 return NULL_RTX;
2354
2355 arg = CALL_EXPR_ARG (exp, 0);
2356
2357 switch (DECL_FUNCTION_CODE (fndecl))
2358 {
2359 CASE_FLT_FN (BUILT_IN_SIN):
2360 CASE_FLT_FN (BUILT_IN_COS):
2361 builtin_optab = sincos_optab; break;
2362 default:
2363 gcc_unreachable ();
2364 }
2365
2366 /* Make a suitable register to place result in. */
2367 mode = TYPE_MODE (TREE_TYPE (exp));
2368
2369 /* Check if the sincos insn is available; otherwise fall back
2370 to the sin or cos insn. */
2371 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2372 switch (DECL_FUNCTION_CODE (fndecl))
2373 {
2374 CASE_FLT_FN (BUILT_IN_SIN):
2375 builtin_optab = sin_optab; break;
2376 CASE_FLT_FN (BUILT_IN_COS):
2377 builtin_optab = cos_optab; break;
2378 default:
2379 gcc_unreachable ();
2380 }
2381
2382 /* Before working hard, check whether the instruction is available. */
2383 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2384 {
2385 rtx result = gen_reg_rtx (mode);
2386
2387 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2388 need to expand the argument again. This way, we will not perform
2389 side-effects more than once. */
2390 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2391
2392 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2393
2394 start_sequence ();
2395
2396 /* Compute into RESULT.
2397 Set RESULT to wherever the result comes back. */
2398 if (builtin_optab == sincos_optab)
2399 {
2400 int ok;
2401
2402 switch (DECL_FUNCTION_CODE (fndecl))
2403 {
2404 CASE_FLT_FN (BUILT_IN_SIN):
2405 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2406 break;
2407 CASE_FLT_FN (BUILT_IN_COS):
2408 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2409 break;
2410 default:
2411 gcc_unreachable ();
2412 }
2413 gcc_assert (ok);
2414 }
2415 else
2416 result = expand_unop (mode, builtin_optab, op0, result, 0);
2417
2418 if (result != 0)
2419 {
2420 /* Output the entire sequence. */
2421 insns = get_insns ();
2422 end_sequence ();
2423 emit_insn (insns);
2424 return result;
2425 }
2426
2427 /* If we were unable to expand via the builtin, stop the sequence
2428 (without outputting the insns) and call the library function
2429 with the stabilized argument list. */
2430 end_sequence ();
2431 }
2432
2433 return expand_call (exp, target, target == const0_rtx);
2434 }
2435
2436 /* Given an interclass math builtin decl FNDECL and its argument ARG,
2437 return an RTL instruction code that implements the functionality.
2438 If that isn't possible or available, return CODE_FOR_nothing. */
2439
2440 static enum insn_code
2441 interclass_mathfn_icode (tree arg, tree fndecl)
2442 {
2443 bool errno_set = false;
2444 optab builtin_optab = unknown_optab;
2445 enum machine_mode mode;
2446
2447 switch (DECL_FUNCTION_CODE (fndecl))
2448 {
2449 CASE_FLT_FN (BUILT_IN_ILOGB):
2450 errno_set = true; builtin_optab = ilogb_optab; break;
2451 CASE_FLT_FN (BUILT_IN_ISINF):
2452 builtin_optab = isinf_optab; break;
2453 case BUILT_IN_ISNORMAL:
2454 case BUILT_IN_ISFINITE:
2455 CASE_FLT_FN (BUILT_IN_FINITE):
2456 case BUILT_IN_FINITED32:
2457 case BUILT_IN_FINITED64:
2458 case BUILT_IN_FINITED128:
2459 case BUILT_IN_ISINFD32:
2460 case BUILT_IN_ISINFD64:
2461 case BUILT_IN_ISINFD128:
2462 /* These builtins have no optabs (yet). */
2463 break;
2464 default:
2465 gcc_unreachable ();
2466 }
2467
2468 /* There's no easy way to detect the case we need to set EDOM. */
2469 if (flag_errno_math && errno_set)
2470 return CODE_FOR_nothing;
2471
2472 /* Optab mode depends on the mode of the input argument. */
2473 mode = TYPE_MODE (TREE_TYPE (arg));
2474
2475 if (builtin_optab)
2476 return optab_handler (builtin_optab, mode);
2477 return CODE_FOR_nothing;
2478 }
2479
2480 /* Expand a call to one of the builtin math functions that operate on
2481 a floating-point argument and produce an integer result (ilogb, isinf,
2482 isnan, etc.).
2483 Return 0 if a normal call should be emitted rather than expanding the
2484 function in-line. EXP is the expression that is a call to the builtin
2485 function; if convenient, the result should be placed in TARGET. */
2486
2487 static rtx
2488 expand_builtin_interclass_mathfn (tree exp, rtx target)
2489 {
2490 enum insn_code icode = CODE_FOR_nothing;
2491 rtx op0;
2492 tree fndecl = get_callee_fndecl (exp);
2493 enum machine_mode mode;
2494 tree arg;
2495
2496 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2497 return NULL_RTX;
2498
2499 arg = CALL_EXPR_ARG (exp, 0);
2500 icode = interclass_mathfn_icode (arg, fndecl);
2501 mode = TYPE_MODE (TREE_TYPE (arg));
2502
2503 if (icode != CODE_FOR_nothing)
2504 {
2505 struct expand_operand ops[1];
2506 rtx last = get_last_insn ();
2507 tree orig_arg = arg;
2508
2509 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2510 need to expand the argument again. This way, we will not perform
2511 side-effects more than once. */
2512 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2513
2514 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2515
2516 if (mode != GET_MODE (op0))
2517 op0 = convert_to_mode (mode, op0, 0);
2518
2519 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2520 if (maybe_legitimize_operands (icode, 0, 1, ops)
2521 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2522 return ops[0].value;
2523
2524 delete_insns_since (last);
2525 CALL_EXPR_ARG (exp, 0) = orig_arg;
2526 }
2527
2528 return NULL_RTX;
2529 }
2530
2531 /* Expand a call to the builtin sincos math function.
2532 Return NULL_RTX if a normal call should be emitted rather than expanding the
2533 function in-line. EXP is the expression that is a call to the builtin
2534 function. */
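/* E.g. for sincos (x, &s, &c), a target sincos insn computes both
values at once, roughly

(s_tmp, c_tmp) = sincos_insn (x);
s = s_tmp; c = c_tmp;

where sincos_insn stands for the target's sincos pattern; without such
an insn the call is emitted normally. */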
2535
2536 static rtx
2537 expand_builtin_sincos (tree exp)
2538 {
2539 rtx op0, op1, op2, target1, target2;
2540 enum machine_mode mode;
2541 tree arg, sinp, cosp;
2542 int result;
2543 location_t loc = EXPR_LOCATION (exp);
2544 tree alias_type, alias_off;
2545
2546 if (!validate_arglist (exp, REAL_TYPE,
2547 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2548 return NULL_RTX;
2549
2550 arg = CALL_EXPR_ARG (exp, 0);
2551 sinp = CALL_EXPR_ARG (exp, 1);
2552 cosp = CALL_EXPR_ARG (exp, 2);
2553
2554 /* Make a suitable register to place result in. */
2555 mode = TYPE_MODE (TREE_TYPE (arg));
2556
2557 /* Check if the sincos insn is available; otherwise emit the call. */
2558 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2559 return NULL_RTX;
2560
2561 target1 = gen_reg_rtx (mode);
2562 target2 = gen_reg_rtx (mode);
2563
2564 op0 = expand_normal (arg);
2565 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2566 alias_off = build_int_cst (alias_type, 0);
2567 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2568 sinp, alias_off));
2569 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2570 cosp, alias_off));
2571
2572 /* Compute into target1 and target2.
2573 Set TARGET to wherever the result comes back. */
2574 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2575 gcc_assert (result);
2576
2577 /* Move target1 and target2 to the memory locations indicated
2578 by op1 and op2. */
2579 emit_move_insn (op1, target1);
2580 emit_move_insn (op2, target2);
2581
2582 return const0_rtx;
2583 }
2584
2585 /* Expand a call to the internal cexpi builtin to the sincos math function.
2586 EXP is the expression that is a call to the builtin function; if convenient,
2587 the result should be placed in TARGET. */
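/* cexpi (x) computes cos (x) + i*sin (x). The strategies below are
tried in order: the target's sincos insn; a libcall
sincos (x, &sin_part, &cos_part) when libc provides one; and finally a
libcall cexp (0 + x*i), building a decl for cexp by hand if
necessary. */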
2588
2589 static rtx
2590 expand_builtin_cexpi (tree exp, rtx target)
2591 {
2592 tree fndecl = get_callee_fndecl (exp);
2593 tree arg, type;
2594 enum machine_mode mode;
2595 rtx op0, op1, op2;
2596 location_t loc = EXPR_LOCATION (exp);
2597
2598 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2599 return NULL_RTX;
2600
2601 arg = CALL_EXPR_ARG (exp, 0);
2602 type = TREE_TYPE (arg);
2603 mode = TYPE_MODE (TREE_TYPE (arg));
2604
2605 /* Try expanding via a sincos optab, fall back to emitting a libcall
2606 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2607 is only generated from sincos or cexp, or when either of them is available. */
2608 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2609 {
2610 op1 = gen_reg_rtx (mode);
2611 op2 = gen_reg_rtx (mode);
2612
2613 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2614
2615 /* Compute into op1 and op2. */
2616 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2617 }
2618 else if (targetm.libc_has_function (function_sincos))
2619 {
2620 tree call, fn = NULL_TREE;
2621 tree top1, top2;
2622 rtx op1a, op2a;
2623
2624 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2625 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2626 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2627 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2628 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2629 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2630 else
2631 gcc_unreachable ();
2632
2633 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2634 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2635 op1a = copy_addr_to_reg (XEXP (op1, 0));
2636 op2a = copy_addr_to_reg (XEXP (op2, 0));
2637 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2638 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2639
2640 /* Make sure not to fold the sincos call again. */
2641 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2642 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2643 call, 3, arg, top1, top2));
2644 }
2645 else
2646 {
2647 tree call, fn = NULL_TREE, narg;
2648 tree ctype = build_complex_type (type);
2649
2650 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2651 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2652 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2653 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2654 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2655 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2656 else
2657 gcc_unreachable ();
2658
2659 /* If we don't have a decl for cexp, create one. This is the
2660 friendliest fallback if the user calls __builtin_cexpi
2661 without full target C99 function support. */
2662 if (fn == NULL_TREE)
2663 {
2664 tree fntype;
2665 const char *name = NULL;
2666
2667 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2668 name = "cexpf";
2669 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2670 name = "cexp";
2671 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2672 name = "cexpl";
2673
2674 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2675 fn = build_fn_decl (name, fntype);
2676 }
2677
2678 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2679 build_real (type, dconst0), arg);
2680
2681 /* Make sure not to fold the cexp call again. */
2682 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2683 return expand_expr (build_call_nary (ctype, call, 1, narg),
2684 target, VOIDmode, EXPAND_NORMAL);
2685 }
2686
2687 /* Now build the proper return type. */
2688 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2689 make_tree (TREE_TYPE (arg), op2),
2690 make_tree (TREE_TYPE (arg), op1)),
2691 target, VOIDmode, EXPAND_NORMAL);
2692 }
2693
2694 /* Conveniently construct a function call expression. FNDECL names the
2695 function to be called, N is the number of arguments, and the "..."
2696 parameters are the argument expressions. Unlike build_call_expr
2697 this doesn't fold the call, hence it will always return a CALL_EXPR. */
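/* E.g. build_call_nofold_loc (loc, fndecl, 2, dest, src) yields the
unfolded CALL_EXPR fndecl (dest, src) at source location LOC. */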
2698
2699 static tree
2700 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2701 {
2702 va_list ap;
2703 tree fntype = TREE_TYPE (fndecl);
2704 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2705
2706 va_start (ap, n);
2707 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2708 va_end (ap);
2709 SET_EXPR_LOCATION (fn, loc);
2710 return fn;
2711 }
2712
2713 /* Expand a call to one of the builtin rounding functions gcc defines
2714 as an extension (lfloor and lceil). As these are gcc extensions we
2715 do not need to worry about setting errno to EDOM.
2716 If expanding via the optab fails, lower the expression to (int)(floor(x)).
2717 EXP is the expression that is a call to the builtin function;
2718 if convenient, the result should be placed in TARGET. */
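/* Roughly, l = lfloor (x) becomes

l = lfloor_insn (x);

when the target has such a pattern, and otherwise

l = (long) floor (x);

where lfloor_insn stands for the target's lfloor pattern and the floor
call may have to be built by hand on non-C99 targets. */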
2719
2720 static rtx
2721 expand_builtin_int_roundingfn (tree exp, rtx target)
2722 {
2723 convert_optab builtin_optab;
2724 rtx op0, insns, tmp;
2725 tree fndecl = get_callee_fndecl (exp);
2726 enum built_in_function fallback_fn;
2727 tree fallback_fndecl;
2728 enum machine_mode mode;
2729 tree arg;
2730
2731 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2732 gcc_unreachable ();
2733
2734 arg = CALL_EXPR_ARG (exp, 0);
2735
2736 switch (DECL_FUNCTION_CODE (fndecl))
2737 {
2738 CASE_FLT_FN (BUILT_IN_ICEIL):
2739 CASE_FLT_FN (BUILT_IN_LCEIL):
2740 CASE_FLT_FN (BUILT_IN_LLCEIL):
2741 builtin_optab = lceil_optab;
2742 fallback_fn = BUILT_IN_CEIL;
2743 break;
2744
2745 CASE_FLT_FN (BUILT_IN_IFLOOR):
2746 CASE_FLT_FN (BUILT_IN_LFLOOR):
2747 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2748 builtin_optab = lfloor_optab;
2749 fallback_fn = BUILT_IN_FLOOR;
2750 break;
2751
2752 default:
2753 gcc_unreachable ();
2754 }
2755
2756 /* Make a suitable register to place result in. */
2757 mode = TYPE_MODE (TREE_TYPE (exp));
2758
2759 target = gen_reg_rtx (mode);
2760
2761 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2762 need to expand the argument again. This way, we will not perform
2763 side-effects more than once. */
2764 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2765
2766 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2767
2768 start_sequence ();
2769
2770 /* Compute into TARGET. */
2771 if (expand_sfix_optab (target, op0, builtin_optab))
2772 {
2773 /* Output the entire sequence. */
2774 insns = get_insns ();
2775 end_sequence ();
2776 emit_insn (insns);
2777 return target;
2778 }
2779
2780 /* If we were unable to expand via the builtin, stop the sequence
2781 (without outputting the insns). */
2782 end_sequence ();
2783
2784 /* Fall back to floating point rounding optab. */
2785 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2786
2787 /* For non-C99 targets we may end up without a fallback fndecl here
2788 if the user called __builtin_lfloor directly. In this case emit
2789 a call to the floor/ceil variants nevertheless. This should result
2790 in the best user experience for targets lacking full C99 support. */
2791 if (fallback_fndecl == NULL_TREE)
2792 {
2793 tree fntype;
2794 const char *name = NULL;
2795
2796 switch (DECL_FUNCTION_CODE (fndecl))
2797 {
2798 case BUILT_IN_ICEIL:
2799 case BUILT_IN_LCEIL:
2800 case BUILT_IN_LLCEIL:
2801 name = "ceil";
2802 break;
2803 case BUILT_IN_ICEILF:
2804 case BUILT_IN_LCEILF:
2805 case BUILT_IN_LLCEILF:
2806 name = "ceilf";
2807 break;
2808 case BUILT_IN_ICEILL:
2809 case BUILT_IN_LCEILL:
2810 case BUILT_IN_LLCEILL:
2811 name = "ceill";
2812 break;
2813 case BUILT_IN_IFLOOR:
2814 case BUILT_IN_LFLOOR:
2815 case BUILT_IN_LLFLOOR:
2816 name = "floor";
2817 break;
2818 case BUILT_IN_IFLOORF:
2819 case BUILT_IN_LFLOORF:
2820 case BUILT_IN_LLFLOORF:
2821 name = "floorf";
2822 break;
2823 case BUILT_IN_IFLOORL:
2824 case BUILT_IN_LFLOORL:
2825 case BUILT_IN_LLFLOORL:
2826 name = "floorl";
2827 break;
2828 default:
2829 gcc_unreachable ();
2830 }
2831
2832 fntype = build_function_type_list (TREE_TYPE (arg),
2833 TREE_TYPE (arg), NULL_TREE);
2834 fallback_fndecl = build_fn_decl (name, fntype);
2835 }
2836
2837 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2838
2839 tmp = expand_normal (exp);
2840 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2841
2842 /* Truncate the result of floating point optab to integer
2843 via expand_fix (). */
2844 target = gen_reg_rtx (mode);
2845 expand_fix (target, tmp, 0);
2846
2847 return target;
2848 }
2849
2850 /* Expand a call to one of the builtin math functions doing integer
2851 conversion (lrint).
2852 Return 0 if a normal call should be emitted rather than expanding the
2853 function in-line. EXP is the expression that is a call to the builtin
2854 function; if convenient, the result should be placed in TARGET. */
2855
2856 static rtx
2857 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2858 {
2859 convert_optab builtin_optab;
2860 rtx op0, insns;
2861 tree fndecl = get_callee_fndecl (exp);
2862 tree arg;
2863 enum machine_mode mode;
2864 enum built_in_function fallback_fn = BUILT_IN_NONE;
2865
2866 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2867 gcc_unreachable ();
2868
2869 arg = CALL_EXPR_ARG (exp, 0);
2870
2871 switch (DECL_FUNCTION_CODE (fndecl))
2872 {
2873 CASE_FLT_FN (BUILT_IN_IRINT):
2874 fallback_fn = BUILT_IN_LRINT;
2875 /* FALLTHRU */
2876 CASE_FLT_FN (BUILT_IN_LRINT):
2877 CASE_FLT_FN (BUILT_IN_LLRINT):
2878 builtin_optab = lrint_optab;
2879 break;
2880
2881 CASE_FLT_FN (BUILT_IN_IROUND):
2882 fallback_fn = BUILT_IN_LROUND;
2883 /* FALLTHRU */
2884 CASE_FLT_FN (BUILT_IN_LROUND):
2885 CASE_FLT_FN (BUILT_IN_LLROUND):
2886 builtin_optab = lround_optab;
2887 break;
2888
2889 default:
2890 gcc_unreachable ();
2891 }
2892
2893 /* There's no easy way to detect the case we need to set EDOM. */
2894 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2895 return NULL_RTX;
2896
2897 /* Make a suitable register to place result in. */
2898 mode = TYPE_MODE (TREE_TYPE (exp));
2899
2900 /* When errno does not need to be maintained, try expanding inline. */
2901 if (!flag_errno_math)
2902 {
2903 rtx result = gen_reg_rtx (mode);
2904
2905 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2906 need to expand the argument again. This way, we will not perform
2907 side-effects more than once. */
2908 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2909
2910 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2911
2912 start_sequence ();
2913
2914 if (expand_sfix_optab (result, op0, builtin_optab))
2915 {
2916 /* Output the entire sequence. */
2917 insns = get_insns ();
2918 end_sequence ();
2919 emit_insn (insns);
2920 return result;
2921 }
2922
2923 /* If we were unable to expand via the builtin, stop the sequence
2924 (without outputting the insns) and call the library function
2925 with the stabilized argument list. */
2926 end_sequence ();
2927 }
2928
2929 if (fallback_fn != BUILT_IN_NONE)
2930 {
2931 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2932 targets, (int) round (x) should never be transformed into
2933 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2934 a call to lround in the hope that the target provides at least some
2935 C99 functions. This should result in the best user experience for
2936 targets lacking full C99 support. */
2937 tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
2938 fallback_fn, 0);
2939
2940 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2941 fallback_fndecl, 1, arg);
2942
2943 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2944 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2945 return convert_to_mode (mode, target, 0);
2946 }
2947
2948 return expand_call (exp, target, target == const0_rtx);
2949 }
2950
2951 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2952 a normal call should be emitted rather than expanding the function
2953 in-line. EXP is the expression that is a call to the builtin
2954 function; if convenient, the result should be placed in TARGET. */
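/* E.g. for double arguments __builtin_powi (x, n) becomes a call to
the libgcc helper __powidf2 (x, n); the call is LCT_CONST since powi,
being a GCC extension, never sets errno. */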
2955
2956 static rtx
2957 expand_builtin_powi (tree exp, rtx target)
2958 {
2959 tree arg0, arg1;
2960 rtx op0, op1;
2961 enum machine_mode mode;
2962 enum machine_mode mode2;
2963
2964 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2965 return NULL_RTX;
2966
2967 arg0 = CALL_EXPR_ARG (exp, 0);
2968 arg1 = CALL_EXPR_ARG (exp, 1);
2969 mode = TYPE_MODE (TREE_TYPE (exp));
2970
2971 /* Emit a libcall to libgcc. */
2972
2973 /* Mode of the 2nd argument must match that of an int. */
2974 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2975
2976 if (target == NULL_RTX)
2977 target = gen_reg_rtx (mode);
2978
2979 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2980 if (GET_MODE (op0) != mode)
2981 op0 = convert_to_mode (mode, op0, 0);
2982 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2983 if (GET_MODE (op1) != mode2)
2984 op1 = convert_to_mode (mode2, op1, 0);
2985
2986 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2987 target, LCT_CONST, mode, 2,
2988 op0, mode, op1, mode2);
2989
2990 return target;
2991 }
2992
2993 /* Expand expression EXP which is a call to the strlen builtin. Return
2994 NULL_RTX if we failed; the caller should emit a normal call, otherwise
2995 try to get the result in TARGET, if convenient. */
2996
2997 static rtx
2998 expand_builtin_strlen (tree exp, rtx target,
2999 enum machine_mode target_mode)
3000 {
3001 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3002 return NULL_RTX;
3003 else
3004 {
3005 struct expand_operand ops[4];
3006 rtx pat;
3007 tree len;
3008 tree src = CALL_EXPR_ARG (exp, 0);
3009 rtx src_reg, before_strlen;
3010 enum machine_mode insn_mode = target_mode;
3011 enum insn_code icode = CODE_FOR_nothing;
3012 unsigned int align;
3013
3014 /* If the length can be computed at compile-time, return it. */
3015 len = c_strlen (src, 0);
3016 if (len)
3017 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3018
3019 /* If the length can be computed at compile-time and is a constant
3020 integer, but there are side-effects in src, evaluate
3021 src for side-effects, then return len.
3022 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3023 can be optimized into: i++; x = 3; */
3024 len = c_strlen (src, 1);
3025 if (len && TREE_CODE (len) == INTEGER_CST)
3026 {
3027 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3028 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3029 }
3030
3031 align = get_pointer_alignment (src) / BITS_PER_UNIT;
3032
3033 /* If SRC is not a pointer type, don't do this operation inline. */
3034 if (align == 0)
3035 return NULL_RTX;
3036
3037 /* Bail out if we can't compute strlen in the right mode. */
3038 while (insn_mode != VOIDmode)
3039 {
3040 icode = optab_handler (strlen_optab, insn_mode);
3041 if (icode != CODE_FOR_nothing)
3042 break;
3043
3044 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3045 }
3046 if (insn_mode == VOIDmode)
3047 return NULL_RTX;
3048
3049 /* Make a place to hold the source address. We will not expand
3050 the actual source until we are sure that the expansion will
3051 not fail -- there are trees that cannot be expanded twice. */
3052 src_reg = gen_reg_rtx (Pmode);
3053
3054 /* Mark the beginning of the strlen sequence so we can emit the
3055 source operand later. */
3056 before_strlen = get_last_insn ();
3057
3058 create_output_operand (&ops[0], target, insn_mode);
3059 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3060 create_integer_operand (&ops[2], 0);
3061 create_integer_operand (&ops[3], align);
3062 if (!maybe_expand_insn (icode, 4, ops))
3063 return NULL_RTX;
3064
3065 /* Now that we are assured of success, expand the source. */
3066 start_sequence ();
3067 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3068 if (pat != src_reg)
3069 {
3070 #ifdef POINTERS_EXTEND_UNSIGNED
3071 if (GET_MODE (pat) != Pmode)
3072 pat = convert_to_mode (Pmode, pat,
3073 POINTERS_EXTEND_UNSIGNED);
3074 #endif
3075 emit_move_insn (src_reg, pat);
3076 }
3077 pat = get_insns ();
3078 end_sequence ();
3079
3080 if (before_strlen)
3081 emit_insn_after (pat, before_strlen);
3082 else
3083 emit_insn_before (pat, get_insns ());
3084
3085 /* Return the value in the proper mode for this function. */
3086 if (GET_MODE (ops[0].value) == target_mode)
3087 target = ops[0].value;
3088 else if (target != 0)
3089 convert_move (target, ops[0].value, 0);
3090 else
3091 target = convert_to_mode (target_mode, ops[0].value, 0);
3092
3093 return target;
3094 }
3095 }
3096
3097 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3098 bytes from constant string DATA + OFFSET and return it as target
3099 constant. */
3100
3101 static rtx
3102 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3103 enum machine_mode mode)
3104 {
3105 const char *str = (const char *) data;
3106
3107 gcc_assert (offset >= 0
3108 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3109 <= strlen (str) + 1));
3110
3111 return c_readstr (str + offset, mode);
3112 }
3113
3114 /* LEN specifies the length of the block for the memcpy/memset operation.
3115 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3116 In some cases we can also make a very likely guess at the maximum size,
3117 which we then store in PROBABLE_MAX_SIZE. */
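/* For instance, if LEN is an unsigned char variable, the type alone
bounds the size to 0...255; if value range information additionally
records the anti-range ~[0, 9] for LEN, the minimum size is raised
to 10. */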
3118
3119 static void
3120 determine_block_size (tree len, rtx len_rtx,
3121 unsigned HOST_WIDE_INT *min_size,
3122 unsigned HOST_WIDE_INT *max_size,
3123 unsigned HOST_WIDE_INT *probable_max_size)
3124 {
3125 if (CONST_INT_P (len_rtx))
3126 {
3127 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3128 return;
3129 }
3130 else
3131 {
3132 double_int min, max;
3133 enum value_range_type range_type = VR_UNDEFINED;
3134
3135 /* Determine bounds from the type. */
3136 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3137 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3138 else
3139 *min_size = 0;
3140 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3141 *probable_max_size = *max_size
3142 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3143 else
3144 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3145
3146 if (TREE_CODE (len) == SSA_NAME)
3147 range_type = get_range_info (len, &min, &max);
3148 if (range_type == VR_RANGE)
3149 {
3150 if (min.fits_uhwi () && *min_size < min.to_uhwi ())
3151 *min_size = min.to_uhwi ();
3152 if (max.fits_uhwi () && *max_size > max.to_uhwi ())
3153 *probable_max_size = *max_size = max.to_uhwi ();
3154 }
3155 else if (range_type == VR_ANTI_RANGE)
3156 {
3157 /* An anti-range 0...N lets us determine that the minimal size is N+1. */
3158 if (min.is_zero ())
3159 {
3160 if ((max + double_int_one).fits_uhwi ())
3161 *min_size = (max + double_int_one).to_uhwi ();
3162 }
3163 /* Code like
3164
3165 int n;
3166 if (n < 100)
3167 memcpy (a, b, n)
3168
3169 produces an anti-range allowing negative values of N. We can
3170 still use that information and guess that N is not negative.
3171 */
3172 else if (!max.ule (double_int_one.lshift (30))
3173 && min.fits_uhwi ())
3174 *probable_max_size = min.to_uhwi () - 1;
3175 }
3176 }
3177 gcc_checking_assert (*max_size <=
3178 (unsigned HOST_WIDE_INT)
3179 GET_MODE_MASK (GET_MODE (len_rtx)));
3180 }
3181
3182 /* Expand a call EXP to the memcpy builtin.
3183 Return NULL_RTX if we failed; the caller should emit a normal call,
3184 otherwise try to get the result in TARGET, if convenient (and in
3185 mode MODE if that's convenient). */
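/* E.g. memcpy (buf, "abc", 4) with sufficiently aligned BUF can be
expanded as a single 4-byte immediate store, never loading from the
string constant; variable lengths instead go through the block-move
expanders with whatever size range is known for LEN. */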
3186
3187 static rtx
3188 expand_builtin_memcpy (tree exp, rtx target)
3189 {
3190 if (!validate_arglist (exp,
3191 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3192 return NULL_RTX;
3193 else
3194 {
3195 tree dest = CALL_EXPR_ARG (exp, 0);
3196 tree src = CALL_EXPR_ARG (exp, 1);
3197 tree len = CALL_EXPR_ARG (exp, 2);
3198 const char *src_str;
3199 unsigned int src_align = get_pointer_alignment (src);
3200 unsigned int dest_align = get_pointer_alignment (dest);
3201 rtx dest_mem, src_mem, dest_addr, len_rtx;
3202 HOST_WIDE_INT expected_size = -1;
3203 unsigned int expected_align = 0;
3204 unsigned HOST_WIDE_INT min_size;
3205 unsigned HOST_WIDE_INT max_size;
3206 unsigned HOST_WIDE_INT probable_max_size;
3207
3208 /* If DEST is not a pointer type, call the normal function. */
3209 if (dest_align == 0)
3210 return NULL_RTX;
3211
3212 /* If SRC is not a pointer type, don't do this
3213 operation in-line. */
3214 if (src_align == 0)
3215 return NULL_RTX;
3216
3217 if (currently_expanding_gimple_stmt)
3218 stringop_block_profile (currently_expanding_gimple_stmt,
3219 &expected_align, &expected_size);
3220
3221 if (expected_align < dest_align)
3222 expected_align = dest_align;
3223 dest_mem = get_memory_rtx (dest, len);
3224 set_mem_align (dest_mem, dest_align);
3225 len_rtx = expand_normal (len);
3226 determine_block_size (len, len_rtx, &min_size, &max_size,
3227 &probable_max_size);
3228 src_str = c_getstr (src);
3229
3230 /* If SRC is a string constant and block move would be done
3231 by pieces, we can avoid loading the string from memory
3232 and only store the computed constants. */
3233 if (src_str
3234 && CONST_INT_P (len_rtx)
3235 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3236 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3237 CONST_CAST (char *, src_str),
3238 dest_align, false))
3239 {
3240 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3241 builtin_memcpy_read_str,
3242 CONST_CAST (char *, src_str),
3243 dest_align, false, 0);
3244 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3245 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3246 return dest_mem;
3247 }
3248
3249 src_mem = get_memory_rtx (src, len);
3250 set_mem_align (src_mem, src_align);
3251
3252 /* Copy word part most expediently. */
3253 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3254 CALL_EXPR_TAILCALL (exp)
3255 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3256 expected_align, expected_size,
3257 min_size, max_size, probable_max_size);
3258
3259 if (dest_addr == 0)
3260 {
3261 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3262 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3263 }
3264 return dest_addr;
3265 }
3266 }
3267
3268 /* Expand a call EXP to the mempcpy builtin.
3269 Return NULL_RTX if we failed; the caller should emit a normal call,
3270 otherwise try to get the result in TARGET, if convenient (and in
3271 mode MODE if that's convenient). If ENDP is 0 return the
3272 destination pointer, if ENDP is 1 return the end pointer ala
3273 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3274 stpcpy. */
3275
3276 static rtx
3277 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3278 {
3279 if (!validate_arglist (exp,
3280 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3281 return NULL_RTX;
3282 else
3283 {
3284 tree dest = CALL_EXPR_ARG (exp, 0);
3285 tree src = CALL_EXPR_ARG (exp, 1);
3286 tree len = CALL_EXPR_ARG (exp, 2);
3287 return expand_builtin_mempcpy_args (dest, src, len,
3288 target, mode, /*endp=*/ 1);
3289 }
3290 }
3291
3292 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3293 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3294 so that this can also be called without constructing an actual CALL_EXPR.
3295 The other arguments and return value are the same as for
3296 expand_builtin_mempcpy. */
3297
3298 static rtx
3299 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3300 rtx target, enum machine_mode mode, int endp)
3301 {
3302 /* If return value is ignored, transform mempcpy into memcpy. */
3303 if (target == const0_rtx && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3304 {
3305 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3306 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3307 dest, src, len);
3308 return expand_expr (result, target, mode, EXPAND_NORMAL);
3309 }
3310 else
3311 {
3312 const char *src_str;
3313 unsigned int src_align = get_pointer_alignment (src);
3314 unsigned int dest_align = get_pointer_alignment (dest);
3315 rtx dest_mem, src_mem, len_rtx;
3316
3317 /* If either SRC or DEST is not a pointer type, don't do this
3318 operation in-line. */
3319 if (dest_align == 0 || src_align == 0)
3320 return NULL_RTX;
3321
3322 /* If LEN is not constant, call the normal function. */
3323 if (! tree_fits_uhwi_p (len))
3324 return NULL_RTX;
3325
3326 len_rtx = expand_normal (len);
3327 src_str = c_getstr (src);
3328
3329 /* If SRC is a string constant and the block move would be done
3330 by pieces, we can avoid loading the string from memory
3331 and need only store the computed constants. */
3332 if (src_str
3333 && CONST_INT_P (len_rtx)
3334 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3335 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3336 CONST_CAST (char *, src_str),
3337 dest_align, false))
3338 {
3339 dest_mem = get_memory_rtx (dest, len);
3340 set_mem_align (dest_mem, dest_align);
3341 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3342 builtin_memcpy_read_str,
3343 CONST_CAST (char *, src_str),
3344 dest_align, false, endp);
3345 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3346 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3347 return dest_mem;
3348 }
3349
3350 if (CONST_INT_P (len_rtx)
3351 && can_move_by_pieces (INTVAL (len_rtx),
3352 MIN (dest_align, src_align)))
3353 {
3354 dest_mem = get_memory_rtx (dest, len);
3355 set_mem_align (dest_mem, dest_align);
3356 src_mem = get_memory_rtx (src, len);
3357 set_mem_align (src_mem, src_align);
3358 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3359 MIN (dest_align, src_align), endp);
3360 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3361 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3362 return dest_mem;
3363 }
3364
3365 return NULL_RTX;
3366 }
3367 }
3368
3369 #ifndef HAVE_movstr
3370 # define HAVE_movstr 0
3371 # define CODE_FOR_movstr CODE_FOR_nothing
3372 #endif
3373
3374 /* Expand into a movstr instruction, if one is available. Return NULL_RTX
3375 if we failed; the caller should emit a normal call. Otherwise try to
3376 get the result in TARGET, if convenient. If ENDP is 0 return the
3377 destination pointer, if ENDP is 1 return the end pointer a la
3378 mempcpy, and if ENDP is 2 return the end pointer minus one a la
3379 stpcpy. */
3380
3381 static rtx
3382 expand_movstr (tree dest, tree src, rtx target, int endp)
3383 {
3384 struct expand_operand ops[3];
3385 rtx dest_mem;
3386 rtx src_mem;
3387
3388 if (!HAVE_movstr)
3389 return NULL_RTX;
3390
3391 dest_mem = get_memory_rtx (dest, NULL);
3392 src_mem = get_memory_rtx (src, NULL);
3393 if (!endp)
3394 {
3395 target = force_reg (Pmode, XEXP (dest_mem, 0));
3396 dest_mem = replace_equiv_address (dest_mem, target);
3397 }
3398
3399 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3400 create_fixed_operand (&ops[1], dest_mem);
3401 create_fixed_operand (&ops[2], src_mem);
3402 if (!maybe_expand_insn (CODE_FOR_movstr, 3, ops))
3403 return NULL_RTX;
3404
3405 if (endp && target != const0_rtx)
3406 {
3407 target = ops[0].value;
3408 /* movstr is supposed to set end to the address of the NUL
3409 terminator. If the caller requested a mempcpy-like return value,
3410 adjust it. */
3411 if (endp == 1)
3412 {
3413 rtx tem = plus_constant (GET_MODE (target),
3414 gen_lowpart (GET_MODE (target), target), 1);
3415 emit_move_insn (target, force_operand (tem, NULL_RTX));
3416 }
3417 }
3418 return target;
3419 }
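
/* Worked example (editor's note): if movstr copies "hi", the output
   operand is set to DEST + 2, the address of the stored NUL (the
   ENDP == 2 convention).  A mempcpy-style caller (ENDP == 1) wants
   DEST + 3, hence the plus_constant adjustment above.  */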
3420
3421 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3422 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3423 try to get the result in TARGET, if convenient (and in mode MODE if that's
3424 convenient). */
3425
3426 static rtx
3427 expand_builtin_strcpy (tree exp, rtx target)
3428 {
3429 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3430 {
3431 tree dest = CALL_EXPR_ARG (exp, 0);
3432 tree src = CALL_EXPR_ARG (exp, 1);
3433 return expand_builtin_strcpy_args (dest, src, target);
3434 }
3435 return NULL_RTX;
3436 }
3437
3438 /* Helper function to do the actual work for expand_builtin_strcpy. The
3439 arguments to the builtin_strcpy call DEST and SRC are broken out
3440 so that this can also be called without constructing an actual CALL_EXPR.
3441 The other arguments and return value are the same as for
3442 expand_builtin_strcpy. */
3443
3444 static rtx
3445 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3446 {
3447 return expand_movstr (dest, src, target, /*endp=*/0);
3448 }
3449
3450 /* Expand a call EXP to the stpcpy builtin.
3451 Return NULL_RTX if we failed; the caller should emit a normal call.
3452 Otherwise try to get the result in TARGET, if convenient (and in
3453 mode MODE if that's convenient). */
3454
3455 static rtx
3456 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3457 {
3458 tree dst, src;
3459 location_t loc = EXPR_LOCATION (exp);
3460
3461 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3462 return NULL_RTX;
3463
3464 dst = CALL_EXPR_ARG (exp, 0);
3465 src = CALL_EXPR_ARG (exp, 1);
3466
3467 /* If return value is ignored, transform stpcpy into strcpy. */
3468 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3469 {
3470 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3471 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3472 return expand_expr (result, target, mode, EXPAND_NORMAL);
3473 }
3474 else
3475 {
3476 tree len, lenp1;
3477 rtx ret;
3478
3479 /* Ensure we get an actual string whose length can be evaluated at
3480 compile time, not an expression containing a string. This is
3481 because the latter will potentially produce pessimized code
3482 when used to compute the return value. */
3483 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3484 return expand_movstr (dst, src, target, /*endp=*/2);
3485
3486 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3487 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3488 target, mode, /*endp=*/2);
3489
3490 if (ret)
3491 return ret;
3492
3493 if (TREE_CODE (len) == INTEGER_CST)
3494 {
3495 rtx len_rtx = expand_normal (len);
3496
3497 if (CONST_INT_P (len_rtx))
3498 {
3499 ret = expand_builtin_strcpy_args (dst, src, target);
3500
3501 if (ret)
3502 {
3503 if (! target)
3504 {
3505 if (mode != VOIDmode)
3506 target = gen_reg_rtx (mode);
3507 else
3508 target = gen_reg_rtx (GET_MODE (ret));
3509 }
3510 if (GET_MODE (target) != GET_MODE (ret))
3511 ret = gen_lowpart (GET_MODE (target), ret);
3512
3513 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3514 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3515 gcc_assert (ret);
3516
3517 return target;
3518 }
3519 }
3520 }
3521
3522 return expand_movstr (dst, src, target, /*endp=*/2);
3523 }
3524 }
3525
3526 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3527 bytes from constant string DATA + OFFSET and return it as target
3528 constant. */
3529
3530 rtx
3531 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3532 enum machine_mode mode)
3533 {
3534 const char *str = (const char *) data;
3535
3536 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3537 return const0_rtx;
3538
3539 return c_readstr (str + offset, mode);
3540 }
3541
3542 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3543 NULL_RTX if we failed; the caller should emit a normal call. */
3544
3545 static rtx
3546 expand_builtin_strncpy (tree exp, rtx target)
3547 {
3548 location_t loc = EXPR_LOCATION (exp);
3549
3550 if (validate_arglist (exp,
3551 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3552 {
3553 tree dest = CALL_EXPR_ARG (exp, 0);
3554 tree src = CALL_EXPR_ARG (exp, 1);
3555 tree len = CALL_EXPR_ARG (exp, 2);
3556 tree slen = c_strlen (src, 1);
3557
3558 /* Both LEN and the length of SRC must be known constants. */
3559 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3560 return NULL_RTX;
3561
3562 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3563
3564 /* We're required to pad with trailing zeros if the requested
3565 LEN is greater than strlen(SRC)+1. In that case try to
3566 use store_by_pieces; if that fails, punt. */
3567 if (tree_int_cst_lt (slen, len))
3568 {
3569 unsigned int dest_align = get_pointer_alignment (dest);
3570 const char *p = c_getstr (src);
3571 rtx dest_mem;
3572
3573 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3574 || !can_store_by_pieces (tree_to_uhwi (len),
3575 builtin_strncpy_read_str,
3576 CONST_CAST (char *, p),
3577 dest_align, false))
3578 return NULL_RTX;
3579
3580 dest_mem = get_memory_rtx (dest, len);
3581 store_by_pieces (dest_mem, tree_to_uhwi (len),
3582 builtin_strncpy_read_str,
3583 CONST_CAST (char *, p), dest_align, false, 0);
3584 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3585 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3586 return dest_mem;
3587 }
3588 }
3589 return NULL_RTX;
3590 }
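
/* Reference for the padding rule above (ISO C strncpy semantics):

     char buf[8];
     strncpy (buf, "ab", 8);   =>  buf = { 'a', 'b', 0, 0, 0, 0, 0, 0 }

   so when LEN exceeds strlen (SRC) + 1 the expansion must emit the
   trailing NULs too; builtin_strncpy_read_str supplies them by
   returning zeros for offsets past the end of the string constant.  */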
3591
3592 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3593 bytes from constant string DATA + OFFSET and return it as target
3594 constant. */
3595
3596 rtx
3597 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3598 enum machine_mode mode)
3599 {
3600 const char *c = (const char *) data;
3601 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3602
3603 memset (p, *c, GET_MODE_SIZE (mode));
3604
3605 return c_readstr (p, mode);
3606 }
3607
3608 /* Callback routine for store_by_pieces. Return the RTL of a register
3609 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3610 char value given in the RTL register data. For example, if mode is
3611 4 bytes wide, return the RTL for 0x01010101*data. */
3612
3613 static rtx
3614 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3615 enum machine_mode mode)
3616 {
3617 rtx target, coeff;
3618 size_t size;
3619 char *p;
3620
3621 size = GET_MODE_SIZE (mode);
3622 if (size == 1)
3623 return (rtx) data;
3624
3625 p = XALLOCAVEC (char, size);
3626 memset (p, 1, size);
3627 coeff = c_readstr (p, mode);
3628
3629 target = convert_to_mode (mode, (rtx) data, 1);
3630 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3631 return force_reg (mode, target);
3632 }
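
/* The multiplication above, shown in plain C for a 4-byte mode
   (editor's sketch, not compiler code):

     uint32_t replicate_byte (uint8_t b) { return b * 0x01010101u; }

   e.g. replicate_byte (0xab) == 0xabababab.  */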
3633
3634 /* Expand expression EXP, which is a call to the memset builtin. Return
3635 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3636 try to get the result in TARGET, if convenient (and in mode MODE if that's
3637 convenient). */
3638
3639 static rtx
3640 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3641 {
3642 if (!validate_arglist (exp,
3643 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3644 return NULL_RTX;
3645 else
3646 {
3647 tree dest = CALL_EXPR_ARG (exp, 0);
3648 tree val = CALL_EXPR_ARG (exp, 1);
3649 tree len = CALL_EXPR_ARG (exp, 2);
3650 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3651 }
3652 }
3653
3654 /* Helper function to do the actual work for expand_builtin_memset. The
3655 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3656 so that this can also be called without constructing an actual CALL_EXPR.
3657 The other arguments and return value are the same as for
3658 expand_builtin_memset. */
3659
3660 static rtx
3661 expand_builtin_memset_args (tree dest, tree val, tree len,
3662 rtx target, enum machine_mode mode, tree orig_exp)
3663 {
3664 tree fndecl, fn;
3665 enum built_in_function fcode;
3666 enum machine_mode val_mode;
3667 char c;
3668 unsigned int dest_align;
3669 rtx dest_mem, dest_addr, len_rtx;
3670 HOST_WIDE_INT expected_size = -1;
3671 unsigned int expected_align = 0;
3672 unsigned HOST_WIDE_INT min_size;
3673 unsigned HOST_WIDE_INT max_size;
3674 unsigned HOST_WIDE_INT probable_max_size;
3675
3676 dest_align = get_pointer_alignment (dest);
3677
3678 /* If DEST is not a pointer type, don't do this operation in-line. */
3679 if (dest_align == 0)
3680 return NULL_RTX;
3681
3682 if (currently_expanding_gimple_stmt)
3683 stringop_block_profile (currently_expanding_gimple_stmt,
3684 &expected_align, &expected_size);
3685
3686 if (expected_align < dest_align)
3687 expected_align = dest_align;
3688
3689 /* If the LEN parameter is zero, return DEST. */
3690 if (integer_zerop (len))
3691 {
3692 /* Evaluate and ignore VAL in case it has side-effects. */
3693 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3694 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3695 }
3696
3697 /* Stabilize the arguments in case we fail. */
3698 dest = builtin_save_expr (dest);
3699 val = builtin_save_expr (val);
3700 len = builtin_save_expr (len);
3701
3702 len_rtx = expand_normal (len);
3703 determine_block_size (len, len_rtx, &min_size, &max_size,
3704 &probable_max_size);
3705 dest_mem = get_memory_rtx (dest, len);
3706 val_mode = TYPE_MODE (unsigned_char_type_node);
3707
3708 if (TREE_CODE (val) != INTEGER_CST)
3709 {
3710 rtx val_rtx;
3711
3712 val_rtx = expand_normal (val);
3713 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3714
3715 /* Assume that we can memset by pieces if we can store
3716 the coefficients by pieces (in the required modes).
3717 We can't pass builtin_memset_gen_str as that emits RTL. */
3718 c = 1;
3719 if (tree_fits_uhwi_p (len)
3720 && can_store_by_pieces (tree_to_uhwi (len),
3721 builtin_memset_read_str, &c, dest_align,
3722 true))
3723 {
3724 val_rtx = force_reg (val_mode, val_rtx);
3725 store_by_pieces (dest_mem, tree_to_uhwi (len),
3726 builtin_memset_gen_str, val_rtx, dest_align,
3727 true, 0);
3728 }
3729 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3730 dest_align, expected_align,
3731 expected_size, min_size, max_size,
3732 probable_max_size))
3733 goto do_libcall;
3734
3735 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3736 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3737 return dest_mem;
3738 }
3739
3740 if (target_char_cast (val, &c))
3741 goto do_libcall;
3742
3743 if (c)
3744 {
3745 if (tree_fits_uhwi_p (len)
3746 && can_store_by_pieces (tree_to_uhwi (len),
3747 builtin_memset_read_str, &c, dest_align,
3748 true))
3749 store_by_pieces (dest_mem, tree_to_uhwi (len),
3750 builtin_memset_read_str, &c, dest_align, true, 0);
3751 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3752 gen_int_mode (c, val_mode),
3753 dest_align, expected_align,
3754 expected_size, min_size, max_size,
3755 probable_max_size))
3756 goto do_libcall;
3757
3758 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3759 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3760 return dest_mem;
3761 }
3762
3763 set_mem_align (dest_mem, dest_align);
3764 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3765 CALL_EXPR_TAILCALL (orig_exp)
3766 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3767 expected_align, expected_size,
3768 min_size, max_size,
3769 probable_max_size);
3770
3771 if (dest_addr == 0)
3772 {
3773 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3774 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3775 }
3776
3777 return dest_addr;
3778
3779 do_libcall:
3780 fndecl = get_callee_fndecl (orig_exp);
3781 fcode = DECL_FUNCTION_CODE (fndecl);
3782 if (fcode == BUILT_IN_MEMSET)
3783 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3784 dest, val, len);
3785 else if (fcode == BUILT_IN_BZERO)
3786 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3787 dest, len);
3788 else
3789 gcc_unreachable ();
3790 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3791 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3792 return expand_call (fn, target, target == const0_rtx);
3793 }
3794
3795 /* Expand expression EXP, which is a call to the bzero builtin. Return
3796 NULL_RTX if we failed; the caller should emit a normal call. */
3797
3798 static rtx
3799 expand_builtin_bzero (tree exp)
3800 {
3801 tree dest, size;
3802 location_t loc = EXPR_LOCATION (exp);
3803
3804 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3805 return NULL_RTX;
3806
3807 dest = CALL_EXPR_ARG (exp, 0);
3808 size = CALL_EXPR_ARG (exp, 1);
3809
3810 /* Build a new argument list transforming bzero(ptr x, int y) into
3811 memset(ptr x, int 0, size_t y). This is done this way
3812 so that if it isn't expanded inline, we fall back to
3813 calling bzero instead of memset. */
3814
3815 return expand_builtin_memset_args (dest, integer_zero_node,
3816 fold_convert_loc (loc,
3817 size_type_node, size),
3818 const0_rtx, VOIDmode, exp);
3819 }
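
/* In effect (editor's note) the rewrite above treats

     bzero (p, n);    as    memset (p, 0, (size_t) n);

   for inline expansion only; if expansion fails, the emitted library
   call is still to bzero, since ORIG_EXP names the bzero decl.  */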
3820
3821 /* Expand expression EXP, which is a call to the memcmp built-in function.
3822 Return NULL_RTX if we failed and the caller should emit a normal call,
3823 otherwise try to get the result in TARGET, if convenient (and in mode
3824 MODE, if that's convenient). */
3825
3826 static rtx
3827 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3828 ATTRIBUTE_UNUSED enum machine_mode mode)
3829 {
3830 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3831
3832 if (!validate_arglist (exp,
3833 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3834 return NULL_RTX;
3835
3836 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3837 implementing memcmp because it will stop if it encounters a
3838 zero byte in both strings. */
3839 #if defined HAVE_cmpmemsi
3840 {
3841 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3842 rtx result;
3843 rtx insn;
3844 tree arg1 = CALL_EXPR_ARG (exp, 0);
3845 tree arg2 = CALL_EXPR_ARG (exp, 1);
3846 tree len = CALL_EXPR_ARG (exp, 2);
3847
3848 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3849 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3850 enum machine_mode insn_mode;
3851
3852 if (HAVE_cmpmemsi)
3853 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3854 else
3855 return NULL_RTX;
3856
3857 /* If we don't know the pointer alignments, just call the function. */
3858 if (arg1_align == 0 || arg2_align == 0)
3859 return NULL_RTX;
3860
3861 /* Make a place to write the result of the instruction. */
3862 result = target;
3863 if (! (result != 0
3864 && REG_P (result) && GET_MODE (result) == insn_mode
3865 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3866 result = gen_reg_rtx (insn_mode);
3867
3868 arg1_rtx = get_memory_rtx (arg1, len);
3869 arg2_rtx = get_memory_rtx (arg2, len);
3870 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3871
3872 /* Set MEM_SIZE as appropriate. */
3873 if (CONST_INT_P (arg3_rtx))
3874 {
3875 set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
3876 set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
3877 }
3878
3879 if (HAVE_cmpmemsi)
3880 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3881 GEN_INT (MIN (arg1_align, arg2_align)));
3882 else
3883 gcc_unreachable ();
3884
3885 if (insn)
3886 emit_insn (insn);
3887 else
3888 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
3889 TYPE_MODE (integer_type_node), 3,
3890 XEXP (arg1_rtx, 0), Pmode,
3891 XEXP (arg2_rtx, 0), Pmode,
3892 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
3893 TYPE_UNSIGNED (sizetype)),
3894 TYPE_MODE (sizetype));
3895
3896 /* Return the value in the proper mode for this function. */
3897 mode = TYPE_MODE (TREE_TYPE (exp));
3898 if (GET_MODE (result) == mode)
3899 return result;
3900 else if (target != 0)
3901 {
3902 convert_move (target, result, 0);
3903 return target;
3904 }
3905 else
3906 return convert_to_mode (mode, result, 0);
3907 }
3908 #endif /* HAVE_cmpmemsi. */
3909
3910 return NULL_RTX;
3911 }
3912
3913 /* Expand expression EXP, which is a call to the strcmp builtin.
3914 Return NULL_RTX if we failed; the caller should emit a normal call.
3915 Otherwise try to get the result in TARGET, if convenient. */
3916
3917 static rtx
3918 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
3919 {
3920 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3921 return NULL_RTX;
3922
3923 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
3924 if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
3925 || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
3926 {
3927 rtx arg1_rtx, arg2_rtx;
3928 rtx result, insn = NULL_RTX;
3929 tree fndecl, fn;
3930 tree arg1 = CALL_EXPR_ARG (exp, 0);
3931 tree arg2 = CALL_EXPR_ARG (exp, 1);
3932
3933 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3934 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3935
3936 /* If we don't know the pointer alignments, just call the function. */
3937 if (arg1_align == 0 || arg2_align == 0)
3938 return NULL_RTX;
3939
3940 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
3941 arg1 = builtin_save_expr (arg1);
3942 arg2 = builtin_save_expr (arg2);
3943
3944 arg1_rtx = get_memory_rtx (arg1, NULL);
3945 arg2_rtx = get_memory_rtx (arg2, NULL);
3946
3947 #ifdef HAVE_cmpstrsi
3948 /* Try to call cmpstrsi. */
3949 if (HAVE_cmpstrsi)
3950 {
3951 enum machine_mode insn_mode
3952 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
3953
3954 /* Make a place to write the result of the instruction. */
3955 result = target;
3956 if (! (result != 0
3957 && REG_P (result) && GET_MODE (result) == insn_mode
3958 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3959 result = gen_reg_rtx (insn_mode);
3960
3961 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
3962 GEN_INT (MIN (arg1_align, arg2_align)));
3963 }
3964 #endif
3965 #ifdef HAVE_cmpstrnsi
3966 /* Try to determine at least one length and call cmpstrnsi. */
3967 if (!insn && HAVE_cmpstrnsi)
3968 {
3969 tree len;
3970 rtx arg3_rtx;
3971
3972 enum machine_mode insn_mode
3973 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
3974 tree len1 = c_strlen (arg1, 1);
3975 tree len2 = c_strlen (arg2, 1);
3976
3977 if (len1)
3978 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3979 if (len2)
3980 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3981
3982 /* If we don't have a constant length for the first, use the length
3983 of the second, if we know it. We don't require a constant for
3984 this case; some cost analysis could be done if both are available
3985 but neither is constant. For now, assume they're equally cheap,
3986 unless one has side effects. If both strings have constant lengths,
3987 use the smaller. */
3988
3989 if (!len1)
3990 len = len2;
3991 else if (!len2)
3992 len = len1;
3993 else if (TREE_SIDE_EFFECTS (len1))
3994 len = len2;
3995 else if (TREE_SIDE_EFFECTS (len2))
3996 len = len1;
3997 else if (TREE_CODE (len1) != INTEGER_CST)
3998 len = len2;
3999 else if (TREE_CODE (len2) != INTEGER_CST)
4000 len = len1;
4001 else if (tree_int_cst_lt (len1, len2))
4002 len = len1;
4003 else
4004 len = len2;
4005
4006 /* If both arguments have side effects, we cannot optimize. */
4007 if (!len || TREE_SIDE_EFFECTS (len))
4008 goto do_libcall;
4009
4010 arg3_rtx = expand_normal (len);
4011
4012 /* Make a place to write the result of the instruction. */
4013 result = target;
4014 if (! (result != 0
4015 && REG_P (result) && GET_MODE (result) == insn_mode
4016 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4017 result = gen_reg_rtx (insn_mode);
4018
4019 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4020 GEN_INT (MIN (arg1_align, arg2_align)));
4021 }
4022 #endif
4023
4024 if (insn)
4025 {
4026 enum machine_mode mode;
4027 emit_insn (insn);
4028
4029 /* Return the value in the proper mode for this function. */
4030 mode = TYPE_MODE (TREE_TYPE (exp));
4031 if (GET_MODE (result) == mode)
4032 return result;
4033 if (target == 0)
4034 return convert_to_mode (mode, result, 0);
4035 convert_move (target, result, 0);
4036 return target;
4037 }
4038
4039 /* Expand the library call ourselves using a stabilized argument
4040 list to avoid re-evaluating the function's arguments twice. */
4041 #ifdef HAVE_cmpstrnsi
4042 do_libcall:
4043 #endif
4044 fndecl = get_callee_fndecl (exp);
4045 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4046 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4047 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4048 return expand_call (fn, target, target == const0_rtx);
4049 }
4050 #endif
4051 return NULL_RTX;
4052 }
4053
4054 /* Expand expression EXP, which is a call to the strncmp builtin.
4055 Return NULL_RTX if we failed; the caller should emit a normal call.
4056 Otherwise try to get the result in TARGET, if convenient. */
4057
4058 static rtx
4059 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4060 ATTRIBUTE_UNUSED enum machine_mode mode)
4061 {
4062 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4063
4064 if (!validate_arglist (exp,
4065 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4066 return NULL_RTX;
4067
4068 /* If c_strlen can determine an expression for one of the string
4069 lengths, and it doesn't have side effects, then emit cmpstrnsi
4070 using length MIN(strlen(string)+1, arg3). */
4071 #ifdef HAVE_cmpstrnsi
4072 if (HAVE_cmpstrnsi)
4073 {
4074 tree len, len1, len2;
4075 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4076 rtx result, insn;
4077 tree fndecl, fn;
4078 tree arg1 = CALL_EXPR_ARG (exp, 0);
4079 tree arg2 = CALL_EXPR_ARG (exp, 1);
4080 tree arg3 = CALL_EXPR_ARG (exp, 2);
4081
4082 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4083 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4084 enum machine_mode insn_mode
4085 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4086
4087 len1 = c_strlen (arg1, 1);
4088 len2 = c_strlen (arg2, 1);
4089
4090 if (len1)
4091 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4092 if (len2)
4093 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4094
4095 /* If we don't have a constant length for the first, use the length
4096 of the second, if we know it. We don't require a constant for
4097 this case; some cost analysis could be done if both are available
4098 but neither is constant. For now, assume they're equally cheap,
4099 unless one has side effects. If both strings have constant lengths,
4100 use the smaller. */
4101
4102 if (!len1)
4103 len = len2;
4104 else if (!len2)
4105 len = len1;
4106 else if (TREE_SIDE_EFFECTS (len1))
4107 len = len2;
4108 else if (TREE_SIDE_EFFECTS (len2))
4109 len = len1;
4110 else if (TREE_CODE (len1) != INTEGER_CST)
4111 len = len2;
4112 else if (TREE_CODE (len2) != INTEGER_CST)
4113 len = len1;
4114 else if (tree_int_cst_lt (len1, len2))
4115 len = len1;
4116 else
4117 len = len2;
4118
4119 /* If both arguments have side effects, we cannot optimize. */
4120 if (!len || TREE_SIDE_EFFECTS (len))
4121 return NULL_RTX;
4122
4123 /* The actual new length parameter is MIN(len,arg3). */
4124 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4125 fold_convert_loc (loc, TREE_TYPE (len), arg3));
4126
4127 /* If we don't know the pointer alignments, just call the function. */
4128 if (arg1_align == 0 || arg2_align == 0)
4129 return NULL_RTX;
4130
4131 /* Make a place to write the result of the instruction. */
4132 result = target;
4133 if (! (result != 0
4134 && REG_P (result) && GET_MODE (result) == insn_mode
4135 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4136 result = gen_reg_rtx (insn_mode);
4137
4138 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4139 arg1 = builtin_save_expr (arg1);
4140 arg2 = builtin_save_expr (arg2);
4141 len = builtin_save_expr (len);
4142
4143 arg1_rtx = get_memory_rtx (arg1, len);
4144 arg2_rtx = get_memory_rtx (arg2, len);
4145 arg3_rtx = expand_normal (len);
4146 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4147 GEN_INT (MIN (arg1_align, arg2_align)));
4148 if (insn)
4149 {
4150 emit_insn (insn);
4151
4152 /* Return the value in the proper mode for this function. */
4153 mode = TYPE_MODE (TREE_TYPE (exp));
4154 if (GET_MODE (result) == mode)
4155 return result;
4156 if (target == 0)
4157 return convert_to_mode (mode, result, 0);
4158 convert_move (target, result, 0);
4159 return target;
4160 }
4161
4162 /* Expand the library call ourselves using a stabilized argument
4163 list to avoid re-evaluating the function's arguments twice. */
4164 fndecl = get_callee_fndecl (exp);
4165 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4166 arg1, arg2, len);
4167 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4168 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4169 return expand_call (fn, target, target == const0_rtx);
4170 }
4171 #endif
4172 return NULL_RTX;
4173 }
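
/* Worked example (editor's note): for

     strncmp ("hello", s, 100)

   c_strlen gives 5 for the first argument, so LEN1 becomes 6 and the
   comparison length is MIN (6, 100) == 6; the cmpstrnsi insn never
   needs to read past the terminating NUL of the constant string.  */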
4174
4175 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4176 if that's convenient. */
4177
4178 rtx
4179 expand_builtin_saveregs (void)
4180 {
4181 rtx val, seq;
4182
4183 /* Don't do __builtin_saveregs more than once in a function.
4184 Save the result of the first call and reuse it. */
4185 if (saveregs_value != 0)
4186 return saveregs_value;
4187
4188 /* When this function is called, it means that registers must be
4189 saved on entry to this function. So we migrate the call to the
4190 first insn of this function. */
4191
4192 start_sequence ();
4193
4194 /* Do whatever the machine needs done in this case. */
4195 val = targetm.calls.expand_builtin_saveregs ();
4196
4197 seq = get_insns ();
4198 end_sequence ();
4199
4200 saveregs_value = val;
4201
4202 /* Put the insns after the NOTE that starts the function. If this
4203 is inside a start_sequence, make the outer-level insn chain current, so
4204 the code is placed at the start of the function. */
4205 push_topmost_sequence ();
4206 emit_insn_after (seq, entry_of_function ());
4207 pop_topmost_sequence ();
4208
4209 return val;
4210 }
4211
4212 /* Expand a call to __builtin_next_arg. */
4213
4214 static rtx
4215 expand_builtin_next_arg (void)
4216 {
4217 /* Checking arguments is already done in fold_builtin_next_arg
4218 which must be called before this function. */
4219 return expand_binop (ptr_mode, add_optab,
4220 crtl->args.internal_arg_pointer,
4221 crtl->args.arg_offset_rtx,
4222 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4223 }
4224
4225 /* Make it easier for the backends by protecting the valist argument
4226 from multiple evaluations. */
4227
4228 static tree
4229 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4230 {
4231 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4232
4233 /* The current way of determining the type of valist is completely
4234 bogus. We should have the information on the va builtin instead. */
4235 if (!vatype)
4236 vatype = targetm.fn_abi_va_list (cfun->decl);
4237
4238 if (TREE_CODE (vatype) == ARRAY_TYPE)
4239 {
4240 if (TREE_SIDE_EFFECTS (valist))
4241 valist = save_expr (valist);
4242
4243 /* For this case, the backends will be expecting a pointer to
4244 vatype, but it's possible we've actually been given an array
4245 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4246 So fix it. */
4247 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4248 {
4249 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4250 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4251 }
4252 }
4253 else
4254 {
4255 tree pt = build_pointer_type (vatype);
4256
4257 if (! needs_lvalue)
4258 {
4259 if (! TREE_SIDE_EFFECTS (valist))
4260 return valist;
4261
4262 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4263 TREE_SIDE_EFFECTS (valist) = 1;
4264 }
4265
4266 if (TREE_SIDE_EFFECTS (valist))
4267 valist = save_expr (valist);
4268 valist = fold_build2_loc (loc, MEM_REF,
4269 vatype, valist, build_int_cst (pt, 0));
4270 }
4271
4272 return valist;
4273 }
4274
4275 /* The "standard" definition of va_list is void*. */
4276
4277 tree
4278 std_build_builtin_va_list (void)
4279 {
4280 return ptr_type_node;
4281 }
4282
4283 /* The "standard" abi va_list is va_list_type_node. */
4284
4285 tree
4286 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4287 {
4288 return va_list_type_node;
4289 }
4290
4291 /* The "standard" type of va_list is va_list_type_node. */
4292
4293 tree
4294 std_canonical_va_list_type (tree type)
4295 {
4296 tree wtype, htype;
4297
4298 if (INDIRECT_REF_P (type))
4299 type = TREE_TYPE (type);
4300 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
4301 type = TREE_TYPE (type);
4302 wtype = va_list_type_node;
4303 htype = type;
4304 /* Treat structure va_list types. */
4305 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4306 htype = TREE_TYPE (htype);
4307 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4308 {
4309 /* If va_list is an array type, the argument may have decayed
4310 to a pointer type, e.g. by being passed to another function.
4311 In that case, unwrap both types so that we can compare the
4312 underlying records. */
4313 if (TREE_CODE (htype) == ARRAY_TYPE
4314 || POINTER_TYPE_P (htype))
4315 {
4316 wtype = TREE_TYPE (wtype);
4317 htype = TREE_TYPE (htype);
4318 }
4319 }
4320 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4321 return va_list_type_node;
4322
4323 return NULL_TREE;
4324 }
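
/* Example of the decay handled above (editor's note): on targets
   where, as on x86_64,

     typedef struct __va_list_tag __builtin_va_list[1];

   a va_list function parameter decays to struct __va_list_tag *, so
   both WTYPE and HTYPE are unwrapped to the underlying RECORD_TYPE
   before the main-variant comparison.  */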
4325
4326 /* The "standard" implementation of va_start: just assign `nextarg' to
4327 the variable. */
4328
4329 void
4330 std_expand_builtin_va_start (tree valist, rtx nextarg)
4331 {
4332 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4333 convert_move (va_r, nextarg, 0);
4334 }
4335
4336 /* Expand EXP, a call to __builtin_va_start. */
4337
4338 static rtx
4339 expand_builtin_va_start (tree exp)
4340 {
4341 rtx nextarg;
4342 tree valist;
4343 location_t loc = EXPR_LOCATION (exp);
4344
4345 if (call_expr_nargs (exp) < 2)
4346 {
4347 error_at (loc, "too few arguments to function %<va_start%>");
4348 return const0_rtx;
4349 }
4350
4351 if (fold_builtin_next_arg (exp, true))
4352 return const0_rtx;
4353
4354 nextarg = expand_builtin_next_arg ();
4355 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4356
4357 if (targetm.expand_builtin_va_start)
4358 targetm.expand_builtin_va_start (valist, nextarg);
4359 else
4360 std_expand_builtin_va_start (valist, nextarg);
4361
4362 return const0_rtx;
4363 }
4364
4365 /* Expand EXP, a call to __builtin_va_end. */
4366
4367 static rtx
4368 expand_builtin_va_end (tree exp)
4369 {
4370 tree valist = CALL_EXPR_ARG (exp, 0);
4371
4372 /* Evaluate for side effects, if needed. I hate macros that don't
4373 do that. */
4374 if (TREE_SIDE_EFFECTS (valist))
4375 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4376
4377 return const0_rtx;
4378 }
4379
4380 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4381 builtin rather than just as an assignment in stdarg.h because of the
4382 nastiness of array-type va_list types. */
4383
4384 static rtx
4385 expand_builtin_va_copy (tree exp)
4386 {
4387 tree dst, src, t;
4388 location_t loc = EXPR_LOCATION (exp);
4389
4390 dst = CALL_EXPR_ARG (exp, 0);
4391 src = CALL_EXPR_ARG (exp, 1);
4392
4393 dst = stabilize_va_list_loc (loc, dst, 1);
4394 src = stabilize_va_list_loc (loc, src, 0);
4395
4396 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4397
4398 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4399 {
4400 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4401 TREE_SIDE_EFFECTS (t) = 1;
4402 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4403 }
4404 else
4405 {
4406 rtx dstb, srcb, size;
4407
4408 /* Evaluate to pointers. */
4409 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4410 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4411 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4412 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4413
4414 dstb = convert_memory_address (Pmode, dstb);
4415 srcb = convert_memory_address (Pmode, srcb);
4416
4417 /* "Dereference" to BLKmode memories. */
4418 dstb = gen_rtx_MEM (BLKmode, dstb);
4419 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4420 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4421 srcb = gen_rtx_MEM (BLKmode, srcb);
4422 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4423 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4424
4425 /* Copy. */
4426 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4427 }
4428
4429 return const0_rtx;
4430 }
4431
4432 /* Expand a call to one of the builtin functions __builtin_frame_address or
4433 __builtin_return_address. */
4434
4435 static rtx
4436 expand_builtin_frame_address (tree fndecl, tree exp)
4437 {
4438 /* The argument must be a nonnegative integer constant.
4439 It counts the number of frames to scan up the stack.
4440 The value is the frame address or return address saved in that frame. */
4441 if (call_expr_nargs (exp) == 0)
4442 /* Warning about missing arg was already issued. */
4443 return const0_rtx;
4444 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4445 {
4446 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4447 error ("invalid argument to %<__builtin_frame_address%>");
4448 else
4449 error ("invalid argument to %<__builtin_return_address%>");
4450 return const0_rtx;
4451 }
4452 else
4453 {
4454 rtx tem
4455 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4456 tree_to_uhwi (CALL_EXPR_ARG (exp, 0)));
4457
4458 /* Some ports cannot access arbitrary stack frames. */
4459 if (tem == NULL)
4460 {
4461 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4462 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4463 else
4464 warning (0, "unsupported argument to %<__builtin_return_address%>");
4465 return const0_rtx;
4466 }
4467
4468 /* For __builtin_frame_address, return what we've got. */
4469 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4470 return tem;
4471
4472 if (!REG_P (tem)
4473 && ! CONSTANT_P (tem))
4474 tem = copy_addr_to_reg (tem);
4475 return tem;
4476 }
4477 }
4478
4479 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4480 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4481 is the same as for allocate_dynamic_stack_space. */
4482
4483 static rtx
4484 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4485 {
4486 rtx op0;
4487 rtx result;
4488 bool valid_arglist;
4489 unsigned int align;
4490 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4491 == BUILT_IN_ALLOCA_WITH_ALIGN);
4492
4493 valid_arglist
4494 = (alloca_with_align
4495 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4496 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4497
4498 if (!valid_arglist)
4499 return NULL_RTX;
4500
4501 /* Compute the argument. */
4502 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4503
4504 /* Compute the alignment. */
4505 align = (alloca_with_align
4506 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4507 : BIGGEST_ALIGNMENT);
4508
4509 /* Allocate the desired space. */
4510 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4511 result = convert_memory_address (ptr_mode, result);
4512
4513 return result;
4514 }
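
/* Usage sketch (editor's note; the alignment argument is read above
   with TREE_INT_CST_LOW and is expressed in bits):

     void *p = __builtin_alloca_with_align (n, 128);

   allocates N bytes of dynamic stack space aligned to 128 bits
   (16 bytes); plain __builtin_alloca uses BIGGEST_ALIGNMENT.  */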
4515
4516 /* Expand a call to bswap builtin in EXP.
4517 Return NULL_RTX if a normal call should be emitted rather than expanding the
4518 function in-line. If convenient, the result should be placed in TARGET.
4519 SUBTARGET may be used as the target for computing one of EXP's operands. */
4520
4521 static rtx
4522 expand_builtin_bswap (enum machine_mode target_mode, tree exp, rtx target,
4523 rtx subtarget)
4524 {
4525 tree arg;
4526 rtx op0;
4527
4528 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4529 return NULL_RTX;
4530
4531 arg = CALL_EXPR_ARG (exp, 0);
4532 op0 = expand_expr (arg,
4533 subtarget && GET_MODE (subtarget) == target_mode
4534 ? subtarget : NULL_RTX,
4535 target_mode, EXPAND_NORMAL);
4536 if (GET_MODE (op0) != target_mode)
4537 op0 = convert_to_mode (target_mode, op0, 1);
4538
4539 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4540
4541 gcc_assert (target);
4542
4543 return convert_to_mode (target_mode, target, 1);
4544 }
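
/* For reference: __builtin_bswap32 (0x12345678) == 0x78563412.  */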
4545
4546 /* Expand a call to a unary builtin in EXP.
4547 Return NULL_RTX if a normal call should be emitted rather than expanding the
4548 function in-line. If convenient, the result should be placed in TARGET.
4549 SUBTARGET may be used as the target for computing one of EXP's operands. */
4550
4551 static rtx
4552 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
4553 rtx subtarget, optab op_optab)
4554 {
4555 rtx op0;
4556
4557 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4558 return NULL_RTX;
4559
4560 /* Compute the argument. */
4561 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4562 (subtarget
4563 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4564 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4565 VOIDmode, EXPAND_NORMAL);
4566 /* Compute op, into TARGET if possible.
4567 Set TARGET to wherever the result comes back. */
4568 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4569 op_optab, op0, target, op_optab != clrsb_optab);
4570 gcc_assert (target);
4571
4572 return convert_to_mode (target_mode, target, 0);
4573 }
4574
4575 /* Expand a call to __builtin_expect. We just return our argument,
4576 as the builtin_expect semantics should already have been applied
4577 by the tree branch prediction pass. */
4578
4579 static rtx
4580 expand_builtin_expect (tree exp, rtx target)
4581 {
4582 tree arg;
4583
4584 if (call_expr_nargs (exp) < 2)
4585 return const0_rtx;
4586 arg = CALL_EXPR_ARG (exp, 0);
4587
4588 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4589 /* When guessing was done, the hints should be already stripped away. */
4590 gcc_assert (!flag_guess_branch_prob
4591 || optimize == 0 || seen_error ());
4592 return target;
4593 }
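
/* Typical use (editor's note):

     if (__builtin_expect (err != 0, 0))  ...

   The hint has already been consumed by the tree branch prediction
   pass; at RTL expansion only the bare value of ERR remains.  */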
4594
4595 /* Expand a call to __builtin_assume_aligned. We just return our first
4596 argument, as the builtin_assume_aligned semantics should already have
4597 been applied by CCP. */
4598
4599 static rtx
4600 expand_builtin_assume_aligned (tree exp, rtx target)
4601 {
4602 if (call_expr_nargs (exp) < 2)
4603 return const0_rtx;
4604 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4605 EXPAND_NORMAL);
4606 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4607 && (call_expr_nargs (exp) < 3
4608 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4609 return target;
4610 }
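
/* Typical use (editor's note):

     double *q = __builtin_assume_aligned (p, 16);

   The alignment fact itself was consumed earlier by CCP; here only
   the pointer argument P is expanded and returned.  */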
4611
4612 void
4613 expand_builtin_trap (void)
4614 {
4615 #ifdef HAVE_trap
4616 if (HAVE_trap)
4617 {
4618 rtx insn = emit_insn (gen_trap ());
4619 /* For trap insns when not accumulating outgoing args force
4620 REG_ARGS_SIZE note to prevent crossjumping of calls with
4621 different args sizes. */
4622 if (!ACCUMULATE_OUTGOING_ARGS)
4623 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4624 }
4625 else
4626 #endif
4627 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4628 emit_barrier ();
4629 }
4630
4631 /* Expand a call to __builtin_unreachable. We do nothing except emit
4632 a barrier saying that control flow will not pass here.
4633
4634 It is the responsibility of the program being compiled to ensure
4635 that control flow never reaches __builtin_unreachable. */
4636 static void
4637 expand_builtin_unreachable (void)
4638 {
4639 emit_barrier ();
4640 }
4641
4642 /* Expand EXP, a call to fabs, fabsf or fabsl.
4643 Return NULL_RTX if a normal call should be emitted rather than expanding
4644 the function inline. If convenient, the result should be placed
4645 in TARGET. SUBTARGET may be used as the target for computing
4646 the operand. */
4647
4648 static rtx
4649 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4650 {
4651 enum machine_mode mode;
4652 tree arg;
4653 rtx op0;
4654
4655 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4656 return NULL_RTX;
4657
4658 arg = CALL_EXPR_ARG (exp, 0);
4659 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4660 mode = TYPE_MODE (TREE_TYPE (arg));
4661 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4662 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4663 }
4664
4665 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4666 Return NULL_RTX if a normal call should be emitted rather than expanding the
4667 function inline. If convenient, the result should be placed in TARGET.
4668 SUBTARGET may be used as the target for computing the operand. */
4669
4670 static rtx
4671 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4672 {
4673 rtx op0, op1;
4674 tree arg;
4675
4676 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4677 return NULL_RTX;
4678
4679 arg = CALL_EXPR_ARG (exp, 0);
4680 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4681
4682 arg = CALL_EXPR_ARG (exp, 1);
4683 op1 = expand_normal (arg);
4684
4685 return expand_copysign (op0, op1, target);
4686 }
4687
4688 /* Create a new constant string literal and return a char* pointer to it.
4689 The STRING_CST value is the LEN characters at STR. */
4690 tree
4691 build_string_literal (int len, const char *str)
4692 {
4693 tree t, elem, index, type;
4694
4695 t = build_string (len, str);
4696 elem = build_type_variant (char_type_node, 1, 0);
4697 index = build_index_type (size_int (len - 1));
4698 type = build_array_type (elem, index);
4699 TREE_TYPE (t) = type;
4700 TREE_CONSTANT (t) = 1;
4701 TREE_READONLY (t) = 1;
4702 TREE_STATIC (t) = 1;
4703
4704 type = build_pointer_type (elem);
4705 t = build1 (ADDR_EXPR, type,
4706 build4 (ARRAY_REF, elem,
4707 t, integer_zero_node, NULL_TREE, NULL_TREE));
4708 return t;
4709 }
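
/* Typical call (editor's note): callers normally include the
   terminating NUL in LEN, e.g.

     t = build_string_literal (strlen ("%s") + 1, "%s");

   yields a char * expression addressing the constant "%s".  */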
4710
4711 /* Expand a call to __builtin___clear_cache. */
4712
4713 static rtx
4714 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
4715 {
4716 #ifndef HAVE_clear_cache
4717 #ifdef CLEAR_INSN_CACHE
4718 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4719 does something. Just do the default expansion to a call to
4720 __clear_cache(). */
4721 return NULL_RTX;
4722 #else
4723 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4724 does nothing. There is no need to call it. Do nothing. */
4725 return const0_rtx;
4726 #endif /* CLEAR_INSN_CACHE */
4727 #else
4728 /* We have a "clear_cache" insn, and it will handle everything. */
4729 tree begin, end;
4730 rtx begin_rtx, end_rtx;
4731
4732 /* We must not expand to a library call. If we did, any
4733 fallback library function in libgcc that might contain a call to
4734 __builtin___clear_cache() would recurse infinitely. */
4735 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4736 {
4737 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4738 return const0_rtx;
4739 }
4740
4741 if (HAVE_clear_cache)
4742 {
4743 struct expand_operand ops[2];
4744
4745 begin = CALL_EXPR_ARG (exp, 0);
4746 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4747
4748 end = CALL_EXPR_ARG (exp, 1);
4749 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4750
4751 create_address_operand (&ops[0], begin_rtx);
4752 create_address_operand (&ops[1], end_rtx);
4753 if (maybe_expand_insn (CODE_FOR_clear_cache, 2, ops))
4754 return const0_rtx;
4755 }
4756 return const0_rtx;
4757 #endif /* HAVE_clear_cache */
4758 }
4759
4760 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4761
4762 static rtx
4763 round_trampoline_addr (rtx tramp)
4764 {
4765 rtx temp, addend, mask;
4766
4767 /* If we don't need too much alignment, we'll have been guaranteed
4768 proper alignment by get_trampoline_type. */
4769 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4770 return tramp;
4771
4772 /* Round address up to desired boundary. */
4773 temp = gen_reg_rtx (Pmode);
4774 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4775 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4776
4777 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4778 temp, 0, OPTAB_LIB_WIDEN);
4779 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4780 temp, 0, OPTAB_LIB_WIDEN);
4781
4782 return tramp;
4783 }
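
/* Numeric example (editor's sketch, assuming an 8-byte trampoline
   alignment): for TRAMP == 0x1003 the PLUS/AND sequence computes

     (0x1003 + 7) & -8 == 0x1008.  */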
4784
4785 static rtx
4786 expand_builtin_init_trampoline (tree exp, bool onstack)
4787 {
4788 tree t_tramp, t_func, t_chain;
4789 rtx m_tramp, r_tramp, r_chain, tmp;
4790
4791 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4792 POINTER_TYPE, VOID_TYPE))
4793 return NULL_RTX;
4794
4795 t_tramp = CALL_EXPR_ARG (exp, 0);
4796 t_func = CALL_EXPR_ARG (exp, 1);
4797 t_chain = CALL_EXPR_ARG (exp, 2);
4798
4799 r_tramp = expand_normal (t_tramp);
4800 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4801 MEM_NOTRAP_P (m_tramp) = 1;
4802
4803 /* If ONSTACK, the TRAMP argument should be the address of a field
4804 within the local function's FRAME decl. Either way, let's see if
4805 we can fill in the MEM_ATTRs for this memory. */
4806 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4807 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
4808
4809 /* The creator of a heap trampoline is responsible for making sure the
4810 address is aligned to at least STACK_BOUNDARY. Normally malloc
4811 will ensure this anyhow. */
4812 tmp = round_trampoline_addr (r_tramp);
4813 if (tmp != r_tramp)
4814 {
4815 m_tramp = change_address (m_tramp, BLKmode, tmp);
4816 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4817 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4818 }
4819
4820 /* The FUNC argument should be the address of the nested function.
4821 Extract the actual function decl to pass to the hook. */
4822 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4823 t_func = TREE_OPERAND (t_func, 0);
4824 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4825
4826 r_chain = expand_normal (t_chain);
4827
4828 /* Generate insns to initialize the trampoline. */
4829 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4830
4831 if (onstack)
4832 {
4833 trampolines_created = 1;
4834
4835 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4836 "trampoline generated for nested function %qD", t_func);
4837 }
4838
4839 return const0_rtx;
4840 }
4841
4842 static rtx
4843 expand_builtin_adjust_trampoline (tree exp)
4844 {
4845 rtx tramp;
4846
4847 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4848 return NULL_RTX;
4849
4850 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4851 tramp = round_trampoline_addr (tramp);
4852 if (targetm.calls.trampoline_adjust_address)
4853 tramp = targetm.calls.trampoline_adjust_address (tramp);
4854
4855 return tramp;
4856 }
4857
4858 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4859 function. The function first checks whether the back end provides
4860 an insn to implement signbit for the respective mode. If not, it
4861 checks whether the floating point format of the value is such that
4862 the sign bit can be extracted. If that is not the case, the
4863 function returns NULL_RTX to indicate that a normal call should be
4864 emitted rather than expanding the function in-line. EXP is the
4865 expression that is a call to the builtin function; if convenient,
4866 the result should be placed in TARGET. */
4867 static rtx
4868 expand_builtin_signbit (tree exp, rtx target)
4869 {
4870 const struct real_format *fmt;
4871 enum machine_mode fmode, imode, rmode;
4872 tree arg;
4873 int word, bitpos;
4874 enum insn_code icode;
4875 rtx temp;
4876 location_t loc = EXPR_LOCATION (exp);
4877
4878 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4879 return NULL_RTX;
4880
4881 arg = CALL_EXPR_ARG (exp, 0);
4882 fmode = TYPE_MODE (TREE_TYPE (arg));
4883 rmode = TYPE_MODE (TREE_TYPE (exp));
4884 fmt = REAL_MODE_FORMAT (fmode);
4885
4886 arg = builtin_save_expr (arg);
4887
4888 /* Expand the argument yielding a RTX expression. */
4889 temp = expand_normal (arg);
4890
4891 /* Check if the back end provides an insn that handles signbit for the
4892 argument's mode. */
4893 icode = optab_handler (signbit_optab, fmode);
4894 if (icode != CODE_FOR_nothing)
4895 {
4896 rtx last = get_last_insn ();
4897 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4898 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4899 return target;
4900 delete_insns_since (last);
4901 }
4902
4903 /* For floating point formats without a sign bit, implement signbit
4904 as "ARG < 0.0". */
4905 bitpos = fmt->signbit_ro;
4906 if (bitpos < 0)
4907 {
4908 /* But we can't do this if the format supports signed zero. */
4909 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
4910 return NULL_RTX;
4911
4912 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
4913 build_real (TREE_TYPE (arg), dconst0));
4914 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4915 }
4916
4917 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
4918 {
4919 imode = int_mode_for_mode (fmode);
4920 if (imode == BLKmode)
4921 return NULL_RTX;
4922 temp = gen_lowpart (imode, temp);
4923 }
4924 else
4925 {
4926 imode = word_mode;
4927 /* Handle targets with different FP word orders. */
4928 if (FLOAT_WORDS_BIG_ENDIAN)
4929 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
4930 else
4931 word = bitpos / BITS_PER_WORD;
4932 temp = operand_subword_force (temp, word, fmode);
4933 bitpos = bitpos % BITS_PER_WORD;
4934 }
4935
4936 /* Force the intermediate word_mode (or narrower) result into a
4937 register. This avoids attempting to create paradoxical SUBREGs
4938 of floating point modes below. */
4939 temp = force_reg (imode, temp);
4940
4941 /* If the bitpos is within the "result mode" lowpart, the operation
4942 can be implemented with a single bitwise AND. Otherwise, we need
4943 a right shift and an AND. */
4944
4945 if (bitpos < GET_MODE_BITSIZE (rmode))
4946 {
4947 double_int mask = double_int_zero.set_bit (bitpos);
4948
4949 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
4950 temp = gen_lowpart (rmode, temp);
4951 temp = expand_binop (rmode, and_optab, temp,
4952 immed_double_int_const (mask, rmode),
4953 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4954 }
4955 else
4956 {
4957 /* Perform a logical right shift to place the signbit in the least
4958 significant bit, then truncate the result to the desired mode
4959 and mask just this bit. */
4960 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
4961 temp = gen_lowpart (rmode, temp);
4962 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
4963 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4964 }
4965
4966 return temp;
4967 }
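
/* Concrete case (editor's sketch): for IEEE single precision BITPOS
   is 31, so with a 32-bit RMODE the expansion reduces to

     signbit (x)  ==>  bits_of (x) & 0x80000000

   whereas a 64-bit FMODE with a 32-bit RMODE takes the shift branch:
   shift the sign bit down to bit 0, truncate, and mask with 1.  */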
4968
4969 /* Expand fork or exec calls. TARGET is the desired target of the
4970 call. EXP is the call. FN is the decl of the actual
4971 function being called. IGNORE is nonzero if the return
4972 value is to be ignored. */
4973
4974 static rtx
4975 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
4976 {
4977 tree id, decl;
4978 tree call;
4979
4980 /* If we are not profiling, just call the function. */
4981 if (!profile_arc_flag)
4982 return NULL_RTX;
4983
4984 /* Otherwise call the wrapper. This should be equivalent for the rest
4985 of the compiler, so the code does not diverge, and the wrapper may
4986 run the code necessary for keeping the profiling sane. */
4987
4988 switch (DECL_FUNCTION_CODE (fn))
4989 {
4990 case BUILT_IN_FORK:
4991 id = get_identifier ("__gcov_fork");
4992 break;
4993
4994 case BUILT_IN_EXECL:
4995 id = get_identifier ("__gcov_execl");
4996 break;
4997
4998 case BUILT_IN_EXECV:
4999 id = get_identifier ("__gcov_execv");
5000 break;
5001
5002 case BUILT_IN_EXECLP:
5003 id = get_identifier ("__gcov_execlp");
5004 break;
5005
5006 case BUILT_IN_EXECLE:
5007 id = get_identifier ("__gcov_execle");
5008 break;
5009
5010 case BUILT_IN_EXECVP:
5011 id = get_identifier ("__gcov_execvp");
5012 break;
5013
5014 case BUILT_IN_EXECVE:
5015 id = get_identifier ("__gcov_execve");
5016 break;
5017
5018 default:
5019 gcc_unreachable ();
5020 }
5021
5022 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5023 FUNCTION_DECL, id, TREE_TYPE (fn));
5024 DECL_EXTERNAL (decl) = 1;
5025 TREE_PUBLIC (decl) = 1;
5026 DECL_ARTIFICIAL (decl) = 1;
5027 TREE_NOTHROW (decl) = 1;
5028 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5029 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5030 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5031 return expand_call (call, target, ignore);
5032 }
5033
5034
5035 \f
5036 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5037 the pointer in these functions is void*, the tree optimizers may remove
5038 casts. The mode computed in expand_builtin isn't reliable either, due
5039 to __sync_bool_compare_and_swap.
5040
5041 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5042 group of builtins. This gives us log2 of the mode size. */
5043
5044 static inline enum machine_mode
5045 get_builtin_sync_mode (int fcode_diff)
5046 {
5047 /* The size is not negotiable, so ask not to get BLKmode in return
5048 if the target indicates that a smaller size would be better. */
5049 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5050 }
5051
5052 /* Expand the memory expression LOC and return the appropriate memory operand
5053 for the builtin_sync operations. */
5054
5055 static rtx
5056 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5057 {
5058 rtx addr, mem;
5059
5060 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5061 addr = convert_memory_address (Pmode, addr);
5062
5063 /* Note that we explicitly do not want any alias information for this
5064 memory, so that we kill all other live memories. Otherwise we don't
5065 satisfy the full barrier semantics of the intrinsic. */
5066 mem = validize_mem (gen_rtx_MEM (mode, addr));
5067
5068 /* The alignment must be at least that of MODE. */
5069 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5070 get_pointer_alignment (loc)));
5071 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5072 MEM_VOLATILE_P (mem) = 1;
5073
5074 return mem;
5075 }
5076
5077 /* Make sure an argument is in the right mode.
5078 EXP is the tree argument.
5079 MODE is the mode it should be in. */
5080
5081 static rtx
5082 expand_expr_force_mode (tree exp, enum machine_mode mode)
5083 {
5084 rtx val;
5085 enum machine_mode old_mode;
5086
5087 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5088 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5089 of CONST_INTs, where we know the old_mode only from the call argument. */
5090
5091 old_mode = GET_MODE (val);
5092 if (old_mode == VOIDmode)
5093 old_mode = TYPE_MODE (TREE_TYPE (exp));
5094 val = convert_modes (mode, old_mode, val, 1);
5095 return val;
5096 }
5097
5098
5099 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5100 EXP is the CALL_EXPR. CODE is the rtx code
5101 that corresponds to the arithmetic or logical operation from the name;
5102 an exception here is that NOT actually means NAND. TARGET is an optional
5103 place for us to store the results; AFTER is true if this is the
5104 fetch_and_xxx form. */
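/* For reference, the user-visible semantics being implemented are
   (shown as non-atomic pseudo-C; both forms execute atomically with
   full MEMMODEL_SEQ_CST barriers):

     TYPE __sync_fetch_and_OP (TYPE *p, TYPE v)
       { TYPE tmp = *p; *p = *p OP v; return tmp; }
     TYPE __sync_OP_and_fetch (TYPE *p, TYPE v)
       { *p = *p OP v; return *p; }

   where for the NAND variants *p becomes ~(*p & v). */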
5105
5106 static rtx
5107 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5108 enum rtx_code code, bool after,
5109 rtx target)
5110 {
5111 rtx val, mem;
5112 location_t loc = EXPR_LOCATION (exp);
5113
5114 if (code == NOT && warn_sync_nand)
5115 {
5116 tree fndecl = get_callee_fndecl (exp);
5117 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5118
5119 static bool warned_f_a_n, warned_n_a_f;
5120
5121 switch (fcode)
5122 {
5123 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5124 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5125 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5126 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5127 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5128 if (warned_f_a_n)
5129 break;
5130
5131 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5132 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5133 warned_f_a_n = true;
5134 break;
5135
5136 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5137 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5138 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5139 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5140 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5141 if (warned_n_a_f)
5142 break;
5143
5144 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5145 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5146 warned_n_a_f = true;
5147 break;
5148
5149 default:
5150 gcc_unreachable ();
5151 }
5152 }
5153
5154 /* Expand the operands. */
5155 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5156 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5157
5158 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SEQ_CST,
5159 after);
5160 }
5161
5162 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5163 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5164 true if this is the boolean form. TARGET is a place for us to store the
5165 results; this is NOT optional if IS_BOOL is true. */
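/* The implemented semantics, for reference (pseudo-C, executed
   atomically):

     TYPE __sync_val_compare_and_swap (TYPE *p, TYPE oldv, TYPE newv)
       { TYPE tmp = *p; if (tmp == oldv) *p = newv; return tmp; }
     bool __sync_bool_compare_and_swap (TYPE *p, TYPE oldv, TYPE newv)
       { if (*p != oldv) return false; *p = newv; return true; }  */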
5166
5167 static rtx
5168 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5169 bool is_bool, rtx target)
5170 {
5171 rtx old_val, new_val, mem;
5172 rtx *pbool, *poval;
5173
5174 /* Expand the operands. */
5175 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5176 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5177 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5178
5179 pbool = poval = NULL;
5180 if (target != const0_rtx)
5181 {
5182 if (is_bool)
5183 pbool = &target;
5184 else
5185 poval = &target;
5186 }
5187 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5188 false, MEMMODEL_SEQ_CST,
5189 MEMMODEL_SEQ_CST))
5190 return NULL_RTX;
5191
5192 return target;
5193 }
5194
5195 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5196 general form is actually an atomic exchange, and some targets only
5197 support a reduced form with the second argument being a constant 1.
5198 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5199 the results. */
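/* Pseudo-C sketch of the semantics (atomic, acquire barrier only):

     TYPE __sync_lock_test_and_set (TYPE *p, TYPE v)
       { TYPE tmp = *p; *p = v; return tmp; }

   On targets with only the reduced form, V must be the constant 1. */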
5200
5201 static rtx
5202 expand_builtin_sync_lock_test_and_set (enum machine_mode mode, tree exp,
5203 rtx target)
5204 {
5205 rtx val, mem;
5206
5207 /* Expand the operands. */
5208 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5209 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5210
5211 return expand_sync_lock_test_and_set (target, mem, val);
5212 }
5213
5214 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5215
5216 static void
5217 expand_builtin_sync_lock_release (enum machine_mode mode, tree exp)
5218 {
5219 rtx mem;
5220
5221 /* Expand the operands. */
5222 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5223
5224 expand_atomic_store (mem, const0_rtx, MEMMODEL_RELEASE, true);
5225 }
5226
5227 /* Given an integer representing an ``enum memmodel'', verify its
5228 correctness and return the memory model enum. */
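/* The low bits of the argument follow the C++11 ordering values,
   i.e. (assuming the usual encoding exposed to users)
   __ATOMIC_RELAXED == 0, __ATOMIC_CONSUME == 1, __ATOMIC_ACQUIRE == 2,
   __ATOMIC_RELEASE == 3, __ATOMIC_ACQ_REL == 4 and
   __ATOMIC_SEQ_CST == 5; bits above MEMMODEL_MASK carry optional
   target-specific specifiers, validated via targetm.memmodel_check. */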
5229
5230 static enum memmodel
5231 get_memmodel (tree exp)
5232 {
5233 rtx op;
5234 unsigned HOST_WIDE_INT val;
5235
5236 /* If the parameter is not a constant, it's a run-time value, so just
5237 convert it to MEMMODEL_SEQ_CST to avoid annoying run-time checking. */
5238 if (TREE_CODE (exp) != INTEGER_CST)
5239 return MEMMODEL_SEQ_CST;
5240
5241 op = expand_normal (exp);
5242
5243 val = INTVAL (op);
5244 if (targetm.memmodel_check)
5245 val = targetm.memmodel_check (val);
5246 else if (val & ~MEMMODEL_MASK)
5247 {
5248 warning (OPT_Winvalid_memory_model,
5249 "Unknown architecture specifier in memory model to builtin.");
5250 return MEMMODEL_SEQ_CST;
5251 }
5252
5253 if ((INTVAL (op) & MEMMODEL_MASK) >= MEMMODEL_LAST)
5254 {
5255 warning (OPT_Winvalid_memory_model,
5256 "invalid memory model argument to builtin");
5257 return MEMMODEL_SEQ_CST;
5258 }
5259
5260 return (enum memmodel) val;
5261 }
5262
5263 /* Expand the __atomic_exchange intrinsic:
5264 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5265 EXP is the CALL_EXPR.
5266 TARGET is an optional place for us to store the results. */
5267
5268 static rtx
5269 expand_builtin_atomic_exchange (enum machine_mode mode, tree exp, rtx target)
5270 {
5271 rtx val, mem;
5272 enum memmodel model;
5273
5274 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5275 if ((model & MEMMODEL_MASK) == MEMMODEL_CONSUME)
5276 {
5277 error ("invalid memory model for %<__atomic_exchange%>");
5278 return NULL_RTX;
5279 }
5280
5281 if (!flag_inline_atomics)
5282 return NULL_RTX;
5283
5284 /* Expand the operands. */
5285 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5286 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5287
5288 return expand_atomic_exchange (target, mem, val, model);
5289 }
5290
5291 /* Expand the __atomic_compare_exchange intrinsic:
5292 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5293 TYPE desired, BOOL weak,
5294 enum memmodel success,
5295 enum memmodel failure)
5296 EXP is the CALL_EXPR.
5297 TARGET is an optional place for us to store the results. */
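/* Typical user-level usage (illustrative only):

     int expected = 0;
     bool ok = __atomic_compare_exchange_n (&obj, &expected, 1,
                                            false, __ATOMIC_SEQ_CST,
                                            __ATOMIC_SEQ_CST);

   On failure, the value actually found in the object is written back
   into EXPECTED, which is why EXPECT below is treated as memory. */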
5298
5299 static rtx
5300 expand_builtin_atomic_compare_exchange (enum machine_mode mode, tree exp,
5301 rtx target)
5302 {
5303 rtx expect, desired, mem, oldval, label;
5304 enum memmodel success, failure;
5305 tree weak;
5306 bool is_weak;
5307
5308 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5309 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5310
5311 if ((failure & MEMMODEL_MASK) == MEMMODEL_RELEASE
5312 || (failure & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5313 {
5314 error ("invalid failure memory model for %<__atomic_compare_exchange%>");
5315 return NULL_RTX;
5316 }
5317
5318 if (failure > success)
5319 {
5320 error ("failure memory model cannot be stronger than success "
5321 "memory model for %<__atomic_compare_exchange%>");
5322 return NULL_RTX;
5323 }
5324
5325 if (!flag_inline_atomics)
5326 return NULL_RTX;
5327
5328 /* Expand the operands. */
5329 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5330
5331 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5332 expect = convert_memory_address (Pmode, expect);
5333 expect = gen_rtx_MEM (mode, expect);
5334 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5335
5336 weak = CALL_EXPR_ARG (exp, 3);
5337 is_weak = false;
5338 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5339 is_weak = true;
5340
5341 if (target == const0_rtx)
5342 target = NULL;
5343
5344 /* Lest the rtl backend create a race condition with an improper store
5345 to memory, always create a new pseudo for OLDVAL. */
5346 oldval = NULL;
5347
5348 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5349 is_weak, success, failure))
5350 return NULL_RTX;
5351
5352 /* Conditionally store back to EXPECT, lest we create a race condition
5353 with an improper store to memory. */
5354 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5355 the normal case where EXPECT is totally private, i.e. a register. At
5356 which point the store can be unconditional. */
5357 label = gen_label_rtx ();
5358 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL, VOIDmode, 1, label);
5359 emit_move_insn (expect, oldval);
5360 emit_label (label);
5361
5362 return target;
5363 }
5364
5365 /* Expand the __atomic_load intrinsic:
5366 TYPE __atomic_load (TYPE *object, enum memmodel)
5367 EXP is the CALL_EXPR.
5368 TARGET is an optional place for us to store the results. */
5369
5370 static rtx
5371 expand_builtin_atomic_load (enum machine_mode mode, tree exp, rtx target)
5372 {
5373 rtx mem;
5374 enum memmodel model;
5375
5376 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5377 if ((model & MEMMODEL_MASK) == MEMMODEL_RELEASE
5378 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5379 {
5380 error ("invalid memory model for %<__atomic_load%>");
5381 return NULL_RTX;
5382 }
5383
5384 if (!flag_inline_atomics)
5385 return NULL_RTX;
5386
5387 /* Expand the operand. */
5388 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5389
5390 return expand_atomic_load (target, mem, model);
5391 }
5392
5393
5394 /* Expand the __atomic_store intrinsic:
5395 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5396 EXP is the CALL_EXPR.
5397 TARGET is an optional place for us to store the results. */
5398
5399 static rtx
5400 expand_builtin_atomic_store (enum machine_mode mode, tree exp)
5401 {
5402 rtx mem, val;
5403 enum memmodel model;
5404
5405 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5406 if ((model & MEMMODEL_MASK) != MEMMODEL_RELAXED
5407 && (model & MEMMODEL_MASK) != MEMMODEL_SEQ_CST
5408 && (model & MEMMODEL_MASK) != MEMMODEL_RELEASE)
5409 {
5410 error ("invalid memory model for %<__atomic_store%>");
5411 return NULL_RTX;
5412 }
5413
5414 if (!flag_inline_atomics)
5415 return NULL_RTX;
5416
5417 /* Expand the operands. */
5418 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5419 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5420
5421 return expand_atomic_store (mem, val, model, false);
5422 }
5423
5424 /* Expand the __atomic_fetch_XXX intrinsic:
5425 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5426 EXP is the CALL_EXPR.
5427 TARGET is an optional place for us to store the results.
5428 CODE is the operation: PLUS, MINUS, AND, XOR, or IOR (NOT means NAND).
5429 FETCH_AFTER is true if returning the result of the operation.
5430 FETCH_AFTER is false if returning the value before the operation.
5431 IGNORE is true if the result is not used.
5432 EXT_CALL is the correct builtin for an external call if this cannot be
5433 resolved to an instruction sequence. */
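/* For example, if only the fetch-before form can be generated or
   called but the fetch-after result is wanted, the trailing code
   below computes RET = RET OP VAL (and, for NAND, RET = ~(RET & VAL))
   to convert one result into the other. */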
5434
5435 static rtx
5436 expand_builtin_atomic_fetch_op (enum machine_mode mode, tree exp, rtx target,
5437 enum rtx_code code, bool fetch_after,
5438 bool ignore, enum built_in_function ext_call)
5439 {
5440 rtx val, mem, ret;
5441 enum memmodel model;
5442 tree fndecl;
5443 tree addr;
5444
5445 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5446
5447 /* Expand the operands. */
5448 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5449 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5450
5451 /* Only try generating instructions if inlining is turned on. */
5452 if (flag_inline_atomics)
5453 {
5454 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5455 if (ret)
5456 return ret;
5457 }
5458
5459 /* If no different routine is needed for the library call, just return;
the caller will emit the normal call. */
5460 if (ext_call == BUILT_IN_NONE)
5461 return NULL_RTX;
5462
5463 /* Change the call to the specified function. */
5464 fndecl = get_callee_fndecl (exp);
5465 addr = CALL_EXPR_FN (exp);
5466 STRIP_NOPS (addr);
5467
5468 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5469 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
5470
5471 /* Expand the call here so we can emit trailing code. */
5472 ret = expand_call (exp, target, ignore);
5473
5474 /* Replace the original function just in case it matters. */
5475 TREE_OPERAND (addr, 0) = fndecl;
5476
5477 /* Then issue the arithmetic correction to return the right result. */
5478 if (!ignore)
5479 {
5480 if (code == NOT)
5481 {
5482 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5483 OPTAB_LIB_WIDEN);
5484 ret = expand_simple_unop (mode, NOT, ret, target, true);
5485 }
5486 else
5487 ret = expand_simple_binop (mode, code, ret, val, target, true,
5488 OPTAB_LIB_WIDEN);
5489 }
5490 return ret;
5491 }
5492
5493
5494 #ifndef HAVE_atomic_clear
5495 # define HAVE_atomic_clear 0
5496 # define gen_atomic_clear(x,y) (gcc_unreachable (), NULL_RTX)
5497 #endif
5498
5499 /* Expand an atomic clear operation.
5500 void __atomic_clear (BOOL *obj, enum memmodel)
5501 EXP is the call expression. */
5502
5503 static rtx
5504 expand_builtin_atomic_clear (tree exp)
5505 {
5506 enum machine_mode mode;
5507 rtx mem, ret;
5508 enum memmodel model;
5509
5510 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5511 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5512 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5513
5514 if ((model & MEMMODEL_MASK) == MEMMODEL_ACQUIRE
5515 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5516 {
5517 error ("invalid memory model for %<__atomic_store%>");
5518 return const0_rtx;
5519 }
5520
5521 if (HAVE_atomic_clear)
5522 {
5523 emit_insn (gen_atomic_clear (mem, model));
5524 return const0_rtx;
5525 }
5526
5527 /* Try issuing an __atomic_store, allowing fallback to a
5528 __sync_lock_release libcall. The only way this can fail is if the
5529 bool type is larger than a word size. Unlikely, but handle it anyway
5530 for completeness. In that case assume a single-threaded model, since
5531 there is no atomic support and no barriers are required. */
5532 ret = expand_atomic_store (mem, const0_rtx, model, true);
5533 if (!ret)
5534 emit_move_insn (mem, const0_rtx);
5535 return const0_rtx;
5536 }
5537
5538 /* Expand an atomic test_and_set operation.
5539 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
5540 EXP is the call expression. */
5541
5542 static rtx
5543 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5544 {
5545 rtx mem;
5546 enum memmodel model;
5547 enum machine_mode mode;
5548
5549 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5550 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5551 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5552
5553 return expand_atomic_test_and_set (target, mem, model);
5554 }
5555
5556
5557 /* Return true if an object of size ARG0, optionally pointed to by ARG1, is
5558 always lock free on this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
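/* E.g. (illustrative): with ARG0 == 4 and a pointer to a normally
   aligned int, type_align >= mode_align and the answer reduces to
   whether a 4-byte compare-and-swap pattern exists; but a 4-byte
   object known only to be char-aligned fails the alignment test and
   yields boolean_false_node regardless. */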
5559
5560 static tree
5561 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5562 {
5563 int size;
5564 enum machine_mode mode;
5565 unsigned int mode_align, type_align;
5566
5567 if (TREE_CODE (arg0) != INTEGER_CST)
5568 return NULL_TREE;
5569
5570 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5571 mode = mode_for_size (size, MODE_INT, 0);
5572 mode_align = GET_MODE_ALIGNMENT (mode);
5573
5574 if (TREE_CODE (arg1) == INTEGER_CST && INTVAL (expand_normal (arg1)) == 0)
5575 type_align = mode_align;
5576 else
5577 {
5578 tree ttype = TREE_TYPE (arg1);
5579
5580 /* This function is usually invoked and folded immediately by the front
5581 end before anything else has a chance to look at it. The pointer
5582 parameter at this point is usually cast to a void *, so check for that
5583 and look past the cast. */
5584 if (TREE_CODE (arg1) == NOP_EXPR && POINTER_TYPE_P (ttype)
5585 && VOID_TYPE_P (TREE_TYPE (ttype)))
5586 arg1 = TREE_OPERAND (arg1, 0);
5587
5588 ttype = TREE_TYPE (arg1);
5589 gcc_assert (POINTER_TYPE_P (ttype));
5590
5591 /* Get the underlying type of the object. */
5592 ttype = TREE_TYPE (ttype);
5593 type_align = TYPE_ALIGN (ttype);
5594 }
5595
5596 /* If the object has smaller alignment, the lock free routines cannot
5597 be used. */
5598 if (type_align < mode_align)
5599 return boolean_false_node;
5600
5601 /* Check if a compare_and_swap pattern exists for the mode which represents
5602 the required size. The pattern is not allowed to fail, so the existence
5603 of the pattern indicates support is present. */
5604 if (can_compare_and_swap_p (mode, true))
5605 return boolean_true_node;
5606 else
5607 return boolean_false_node;
5608 }
5609
5610 /* Return one if the parameters to call EXP represent an object which will
5611 always generate lock free instructions. The first argument represents the
5612 size of the object, and the second parameter is a pointer to the object
5613 itself. If NULL is passed for the object, then the result is based on
5614 typical alignment for an object of the specified size. Otherwise return
5615 zero. */
5616
5617 static rtx
5618 expand_builtin_atomic_always_lock_free (tree exp)
5619 {
5620 tree size;
5621 tree arg0 = CALL_EXPR_ARG (exp, 0);
5622 tree arg1 = CALL_EXPR_ARG (exp, 1);
5623
5624 if (TREE_CODE (arg0) != INTEGER_CST)
5625 {
5626 error ("non-constant argument 1 to __atomic_always_lock_free");
5627 return const0_rtx;
5628 }
5629
5630 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5631 if (size == boolean_true_node)
5632 return const1_rtx;
5633 return const0_rtx;
5634 }
5635
5636 /* Return one or zero if it can be determined that object ARG1 of size ARG0
5637 is lock free on this architecture. */
5638
5639 static tree
5640 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5641 {
5642 if (!flag_inline_atomics)
5643 return NULL_TREE;
5644
5645 /* If it isn't always lock free, don't generate a result. */
5646 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5647 return boolean_true_node;
5648
5649 return NULL_TREE;
5650 }
5651
5652 /* Return one if it can be determined at compile time that the object
5653 described by call EXP is lock free on this architecture. The first
5654 argument is the size of the object, and the second is a pointer to the
5655 object itself. If NULL is passed for the object, then the result is
5656 based on typical alignment for an object of the specified size.
5657 Otherwise return NULL_RTX. */
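/* Illustrative: __atomic_is_lock_free (sizeof (T), &obj) expands to a
   constant 1 when provably lock free; when the answer is unknown at
   compile time it is left as a call to the like-named libatomic
   runtime routine, hence the NULL_RTX result below. */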
5658
5659 static rtx
5660 expand_builtin_atomic_is_lock_free (tree exp)
5661 {
5662 tree size;
5663 tree arg0 = CALL_EXPR_ARG (exp, 0);
5664 tree arg1 = CALL_EXPR_ARG (exp, 1);
5665
5666 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5667 {
5668 error ("non-integer argument 1 to __atomic_is_lock_free");
5669 return NULL_RTX;
5670 }
5671
5672 if (!flag_inline_atomics)
5673 return NULL_RTX;
5674
5675 /* If the value is known at compile time, return the RTX for it. */
5676 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5677 if (size == boolean_true_node)
5678 return const1_rtx;
5679
5680 return NULL_RTX;
5681 }
5682
5683 /* Expand the __atomic_thread_fence intrinsic:
5684 void __atomic_thread_fence (enum memmodel)
5685 EXP is the CALL_EXPR. */
5686
5687 static void
5688 expand_builtin_atomic_thread_fence (tree exp)
5689 {
5690 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5691 expand_mem_thread_fence (model);
5692 }
5693
5694 /* Expand the __atomic_signal_fence intrinsic:
5695 void __atomic_signal_fence (enum memmodel)
5696 EXP is the CALL_EXPR. */
5697
5698 static void
5699 expand_builtin_atomic_signal_fence (tree exp)
5700 {
5701 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5702 expand_mem_signal_fence (model);
5703 }
5704
5705 /* Expand the __sync_synchronize intrinsic. */
5706
5707 static void
5708 expand_builtin_sync_synchronize (void)
5709 {
5710 expand_mem_thread_fence (MEMMODEL_SEQ_CST);
5711 }
5712
5713 static rtx
5714 expand_builtin_thread_pointer (tree exp, rtx target)
5715 {
5716 enum insn_code icode;
5717 if (!validate_arglist (exp, VOID_TYPE))
5718 return const0_rtx;
5719 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5720 if (icode != CODE_FOR_nothing)
5721 {
5722 struct expand_operand op;
5723 /* If the target is not suitable then create a new target. */
5724 if (target == NULL_RTX
5725 || !REG_P (target)
5726 || GET_MODE (target) != Pmode)
5727 target = gen_reg_rtx (Pmode);
5728 create_output_operand (&op, target, Pmode);
5729 expand_insn (icode, 1, &op);
5730 return target;
5731 }
5732 error ("__builtin_thread_pointer is not supported on this target");
5733 return const0_rtx;
5734 }
5735
5736 static void
5737 expand_builtin_set_thread_pointer (tree exp)
5738 {
5739 enum insn_code icode;
5740 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5741 return;
5742 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5743 if (icode != CODE_FOR_nothing)
5744 {
5745 struct expand_operand op;
5746 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5747 Pmode, EXPAND_NORMAL);
5748 create_input_operand (&op, val, Pmode);
5749 expand_insn (icode, 1, &op);
5750 return;
5751 }
5752 error ("__builtin_set_thread_pointer is not supported on this target");
5753 }
5754
5755 \f
5756 /* Emit code to restore the stack pointer from the value saved in VAR. */
5757
5758 static void
5759 expand_stack_restore (tree var)
5760 {
5761 rtx prev, sa = expand_normal (var);
5762
5763 sa = convert_memory_address (Pmode, sa);
5764
5765 prev = get_last_insn ();
5766 emit_stack_restore (SAVE_BLOCK, sa);
5767 fixup_args_size_notes (prev, get_last_insn (), 0);
5768 }
5769
5770
5771 /* Emit code to save the current value of the stack pointer. */
5772
5773 static rtx
5774 expand_stack_save (void)
5775 {
5776 rtx ret = NULL_RTX;
5777
5778 do_pending_stack_adjust ();
5779 emit_stack_save (SAVE_BLOCK, &ret);
5780 return ret;
5781 }
5782
5783 /* Expand an expression EXP that calls a built-in function,
5784 with result going to TARGET if that's convenient
5785 (and in mode MODE if that's convenient).
5786 SUBTARGET may be used as the target for computing one of EXP's operands.
5787 IGNORE is nonzero if the value is to be ignored. */
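/* Note the convention used throughout the switch below: an expander
   returning a nonzero rtx has fully expanded the call; returning
   NULL_RTX (or falling through via break) requests the ordinary
   library call emitted at the bottom of this function. */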
5788
5789 rtx
5790 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5791 int ignore)
5792 {
5793 tree fndecl = get_callee_fndecl (exp);
5794 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5795 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5796 int flags;
5797
5798 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5799 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5800
5801 /* When not optimizing, generate calls to library functions for a certain
5802 set of builtins. */
5803 if (!optimize
5804 && !called_as_built_in (fndecl)
5805 && fcode != BUILT_IN_FORK
5806 && fcode != BUILT_IN_EXECL
5807 && fcode != BUILT_IN_EXECV
5808 && fcode != BUILT_IN_EXECLP
5809 && fcode != BUILT_IN_EXECLE
5810 && fcode != BUILT_IN_EXECVP
5811 && fcode != BUILT_IN_EXECVE
5812 && fcode != BUILT_IN_ALLOCA
5813 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5814 && fcode != BUILT_IN_FREE)
5815 return expand_call (exp, target, ignore);
5816
5817 /* The built-in function expanders test for target == const0_rtx
5818 to determine whether the function's result will be ignored. */
5819 if (ignore)
5820 target = const0_rtx;
5821
5822 /* If the result of a pure or const built-in function is ignored, and
5823 none of its arguments are volatile, we can avoid expanding the
5824 built-in call and just evaluate the arguments for side-effects. */
5825 if (target == const0_rtx
5826 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5827 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5828 {
5829 bool volatilep = false;
5830 tree arg;
5831 call_expr_arg_iterator iter;
5832
5833 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5834 if (TREE_THIS_VOLATILE (arg))
5835 {
5836 volatilep = true;
5837 break;
5838 }
5839
5840 if (! volatilep)
5841 {
5842 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5843 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5844 return const0_rtx;
5845 }
5846 }
5847
5848 switch (fcode)
5849 {
5850 CASE_FLT_FN (BUILT_IN_FABS):
5851 case BUILT_IN_FABSD32:
5852 case BUILT_IN_FABSD64:
5853 case BUILT_IN_FABSD128:
5854 target = expand_builtin_fabs (exp, target, subtarget);
5855 if (target)
5856 return target;
5857 break;
5858
5859 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5860 target = expand_builtin_copysign (exp, target, subtarget);
5861 if (target)
5862 return target;
5863 break;
5864
5865 /* Just do a normal library call if we were unable to fold
5866 the values. */
5867 CASE_FLT_FN (BUILT_IN_CABS):
5868 break;
5869
5870 CASE_FLT_FN (BUILT_IN_EXP):
5871 CASE_FLT_FN (BUILT_IN_EXP10):
5872 CASE_FLT_FN (BUILT_IN_POW10):
5873 CASE_FLT_FN (BUILT_IN_EXP2):
5874 CASE_FLT_FN (BUILT_IN_EXPM1):
5875 CASE_FLT_FN (BUILT_IN_LOGB):
5876 CASE_FLT_FN (BUILT_IN_LOG):
5877 CASE_FLT_FN (BUILT_IN_LOG10):
5878 CASE_FLT_FN (BUILT_IN_LOG2):
5879 CASE_FLT_FN (BUILT_IN_LOG1P):
5880 CASE_FLT_FN (BUILT_IN_TAN):
5881 CASE_FLT_FN (BUILT_IN_ASIN):
5882 CASE_FLT_FN (BUILT_IN_ACOS):
5883 CASE_FLT_FN (BUILT_IN_ATAN):
5884 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5885 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5886 because of possible accuracy problems. */
5887 if (! flag_unsafe_math_optimizations)
5888 break;
5889 CASE_FLT_FN (BUILT_IN_SQRT):
5890 CASE_FLT_FN (BUILT_IN_FLOOR):
5891 CASE_FLT_FN (BUILT_IN_CEIL):
5892 CASE_FLT_FN (BUILT_IN_TRUNC):
5893 CASE_FLT_FN (BUILT_IN_ROUND):
5894 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5895 CASE_FLT_FN (BUILT_IN_RINT):
5896 target = expand_builtin_mathfn (exp, target, subtarget);
5897 if (target)
5898 return target;
5899 break;
5900
5901 CASE_FLT_FN (BUILT_IN_FMA):
5902 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
5903 if (target)
5904 return target;
5905 break;
5906
5907 CASE_FLT_FN (BUILT_IN_ILOGB):
5908 if (! flag_unsafe_math_optimizations)
5909 break;
5910 CASE_FLT_FN (BUILT_IN_ISINF):
5911 CASE_FLT_FN (BUILT_IN_FINITE):
5912 case BUILT_IN_ISFINITE:
5913 case BUILT_IN_ISNORMAL:
5914 target = expand_builtin_interclass_mathfn (exp, target);
5915 if (target)
5916 return target;
5917 break;
5918
5919 CASE_FLT_FN (BUILT_IN_ICEIL):
5920 CASE_FLT_FN (BUILT_IN_LCEIL):
5921 CASE_FLT_FN (BUILT_IN_LLCEIL):
5922 CASE_FLT_FN (BUILT_IN_LFLOOR):
5923 CASE_FLT_FN (BUILT_IN_IFLOOR):
5924 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5925 target = expand_builtin_int_roundingfn (exp, target);
5926 if (target)
5927 return target;
5928 break;
5929
5930 CASE_FLT_FN (BUILT_IN_IRINT):
5931 CASE_FLT_FN (BUILT_IN_LRINT):
5932 CASE_FLT_FN (BUILT_IN_LLRINT):
5933 CASE_FLT_FN (BUILT_IN_IROUND):
5934 CASE_FLT_FN (BUILT_IN_LROUND):
5935 CASE_FLT_FN (BUILT_IN_LLROUND):
5936 target = expand_builtin_int_roundingfn_2 (exp, target);
5937 if (target)
5938 return target;
5939 break;
5940
5941 CASE_FLT_FN (BUILT_IN_POWI):
5942 target = expand_builtin_powi (exp, target);
5943 if (target)
5944 return target;
5945 break;
5946
5947 CASE_FLT_FN (BUILT_IN_ATAN2):
5948 CASE_FLT_FN (BUILT_IN_LDEXP):
5949 CASE_FLT_FN (BUILT_IN_SCALB):
5950 CASE_FLT_FN (BUILT_IN_SCALBN):
5951 CASE_FLT_FN (BUILT_IN_SCALBLN):
5952 if (! flag_unsafe_math_optimizations)
5953 break;
5954
5955 CASE_FLT_FN (BUILT_IN_FMOD):
5956 CASE_FLT_FN (BUILT_IN_REMAINDER):
5957 CASE_FLT_FN (BUILT_IN_DREM):
5958 CASE_FLT_FN (BUILT_IN_POW):
5959 target = expand_builtin_mathfn_2 (exp, target, subtarget);
5960 if (target)
5961 return target;
5962 break;
5963
5964 CASE_FLT_FN (BUILT_IN_CEXPI):
5965 target = expand_builtin_cexpi (exp, target);
5966 gcc_assert (target);
5967 return target;
5968
5969 CASE_FLT_FN (BUILT_IN_SIN):
5970 CASE_FLT_FN (BUILT_IN_COS):
5971 if (! flag_unsafe_math_optimizations)
5972 break;
5973 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5974 if (target)
5975 return target;
5976 break;
5977
5978 CASE_FLT_FN (BUILT_IN_SINCOS):
5979 if (! flag_unsafe_math_optimizations)
5980 break;
5981 target = expand_builtin_sincos (exp);
5982 if (target)
5983 return target;
5984 break;
5985
5986 case BUILT_IN_APPLY_ARGS:
5987 return expand_builtin_apply_args ();
5988
5989 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5990 FUNCTION with a copy of the parameters described by
5991 ARGUMENTS, and ARGSIZE. It returns a block of memory
5992 allocated on the stack into which is stored all the registers
5993 that might possibly be used for returning the result of a
5994 function. ARGUMENTS is the value returned by
5995 __builtin_apply_args. ARGSIZE is the number of bytes of
5996 arguments that must be copied. ??? How should this value be
5997 computed? We'll also need a safe worst case value for varargs
5998 functions. */
5999 case BUILT_IN_APPLY:
6000 if (!validate_arglist (exp, POINTER_TYPE,
6001 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6002 && !validate_arglist (exp, REFERENCE_TYPE,
6003 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6004 return const0_rtx;
6005 else
6006 {
6007 rtx ops[3];
6008
6009 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6010 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6011 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6012
6013 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6014 }
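/* Illustrative use of the __builtin_apply_args/__builtin_apply/
   __builtin_return triple (user code; OTHER_FN and the worst-case
   argument-block size of 64 bytes are placeholders):

     void *args = __builtin_apply_args ();
     void *ret = __builtin_apply ((void (*)()) other_fn, args, 64);
     __builtin_return (ret);  */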
6015
6016 /* __builtin_return (RESULT) causes the function to return the
6017 value described by RESULT. RESULT is address of the block of
6018 memory returned by __builtin_apply. */
6019 case BUILT_IN_RETURN:
6020 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6021 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6022 return const0_rtx;
6023
6024 case BUILT_IN_SAVEREGS:
6025 return expand_builtin_saveregs ();
6026
6027 case BUILT_IN_VA_ARG_PACK:
6028 /* All valid uses of __builtin_va_arg_pack () are removed during
6029 inlining. */
6030 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6031 return const0_rtx;
6032
6033 case BUILT_IN_VA_ARG_PACK_LEN:
6034 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6035 inlining. */
6036 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6037 return const0_rtx;
6038
6039 /* Return the address of the first anonymous stack arg. */
6040 case BUILT_IN_NEXT_ARG:
6041 if (fold_builtin_next_arg (exp, false))
6042 return const0_rtx;
6043 return expand_builtin_next_arg ();
6044
6045 case BUILT_IN_CLEAR_CACHE:
6046 target = expand_builtin___clear_cache (exp);
6047 if (target)
6048 return target;
6049 break;
6050
6051 case BUILT_IN_CLASSIFY_TYPE:
6052 return expand_builtin_classify_type (exp);
6053
6054 case BUILT_IN_CONSTANT_P:
6055 return const0_rtx;
6056
6057 case BUILT_IN_FRAME_ADDRESS:
6058 case BUILT_IN_RETURN_ADDRESS:
6059 return expand_builtin_frame_address (fndecl, exp);
6060
6061 /* Returns the address of the area where the structure is returned.
6062 0 otherwise. */
6063 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6064 if (call_expr_nargs (exp) != 0
6065 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6066 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6067 return const0_rtx;
6068 else
6069 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6070
6071 case BUILT_IN_ALLOCA:
6072 case BUILT_IN_ALLOCA_WITH_ALIGN:
6073 /* If the allocation stems from the declaration of a variable-sized
6074 object, it cannot accumulate. */
6075 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6076 if (target)
6077 return target;
6078 break;
6079
6080 case BUILT_IN_STACK_SAVE:
6081 return expand_stack_save ();
6082
6083 case BUILT_IN_STACK_RESTORE:
6084 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6085 return const0_rtx;
6086
6087 case BUILT_IN_BSWAP16:
6088 case BUILT_IN_BSWAP32:
6089 case BUILT_IN_BSWAP64:
6090 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6091 if (target)
6092 return target;
6093 break;
6094
6095 CASE_INT_FN (BUILT_IN_FFS):
6096 target = expand_builtin_unop (target_mode, exp, target,
6097 subtarget, ffs_optab);
6098 if (target)
6099 return target;
6100 break;
6101
6102 CASE_INT_FN (BUILT_IN_CLZ):
6103 target = expand_builtin_unop (target_mode, exp, target,
6104 subtarget, clz_optab);
6105 if (target)
6106 return target;
6107 break;
6108
6109 CASE_INT_FN (BUILT_IN_CTZ):
6110 target = expand_builtin_unop (target_mode, exp, target,
6111 subtarget, ctz_optab);
6112 if (target)
6113 return target;
6114 break;
6115
6116 CASE_INT_FN (BUILT_IN_CLRSB):
6117 target = expand_builtin_unop (target_mode, exp, target,
6118 subtarget, clrsb_optab);
6119 if (target)
6120 return target;
6121 break;
6122
6123 CASE_INT_FN (BUILT_IN_POPCOUNT):
6124 target = expand_builtin_unop (target_mode, exp, target,
6125 subtarget, popcount_optab);
6126 if (target)
6127 return target;
6128 break;
6129
6130 CASE_INT_FN (BUILT_IN_PARITY):
6131 target = expand_builtin_unop (target_mode, exp, target,
6132 subtarget, parity_optab);
6133 if (target)
6134 return target;
6135 break;
6136
6137 case BUILT_IN_STRLEN:
6138 target = expand_builtin_strlen (exp, target, target_mode);
6139 if (target)
6140 return target;
6141 break;
6142
6143 case BUILT_IN_STRCPY:
6144 target = expand_builtin_strcpy (exp, target);
6145 if (target)
6146 return target;
6147 break;
6148
6149 case BUILT_IN_STRNCPY:
6150 target = expand_builtin_strncpy (exp, target);
6151 if (target)
6152 return target;
6153 break;
6154
6155 case BUILT_IN_STPCPY:
6156 target = expand_builtin_stpcpy (exp, target, mode);
6157 if (target)
6158 return target;
6159 break;
6160
6161 case BUILT_IN_MEMCPY:
6162 target = expand_builtin_memcpy (exp, target);
6163 if (target)
6164 return target;
6165 break;
6166
6167 case BUILT_IN_MEMPCPY:
6168 target = expand_builtin_mempcpy (exp, target, mode);
6169 if (target)
6170 return target;
6171 break;
6172
6173 case BUILT_IN_MEMSET:
6174 target = expand_builtin_memset (exp, target, mode);
6175 if (target)
6176 return target;
6177 break;
6178
6179 case BUILT_IN_BZERO:
6180 target = expand_builtin_bzero (exp);
6181 if (target)
6182 return target;
6183 break;
6184
6185 case BUILT_IN_STRCMP:
6186 target = expand_builtin_strcmp (exp, target);
6187 if (target)
6188 return target;
6189 break;
6190
6191 case BUILT_IN_STRNCMP:
6192 target = expand_builtin_strncmp (exp, target, mode);
6193 if (target)
6194 return target;
6195 break;
6196
6197 case BUILT_IN_BCMP:
6198 case BUILT_IN_MEMCMP:
6199 target = expand_builtin_memcmp (exp, target, mode);
6200 if (target)
6201 return target;
6202 break;
6203
6204 case BUILT_IN_SETJMP:
6205 /* This should have been lowered to the builtins below. */
6206 gcc_unreachable ();
6207
6208 case BUILT_IN_SETJMP_SETUP:
6209 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6210 and the receiver label. */
6211 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6212 {
6213 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6214 VOIDmode, EXPAND_NORMAL);
6215 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6216 rtx label_r = label_rtx (label);
6217
6218 /* This is copied from the handling of non-local gotos. */
6219 expand_builtin_setjmp_setup (buf_addr, label_r);
6220 nonlocal_goto_handler_labels
6221 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6222 nonlocal_goto_handler_labels);
6223 /* ??? Do not let expand_label treat us as such since we would
6224 not want to be both on the list of non-local labels and on
6225 the list of forced labels. */
6226 FORCED_LABEL (label) = 0;
6227 return const0_rtx;
6228 }
6229 break;
6230
6231 case BUILT_IN_SETJMP_RECEIVER:
6232 /* __builtin_setjmp_receiver is passed the receiver label. */
6233 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6234 {
6235 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6236 rtx label_r = label_rtx (label);
6237
6238 expand_builtin_setjmp_receiver (label_r);
6239 return const0_rtx;
6240 }
6241 break;
6242
6243 /* __builtin_longjmp is passed a pointer to an array of five words.
6244 It's similar to the C library longjmp function but works with
6245 __builtin_setjmp above. */
6246 case BUILT_IN_LONGJMP:
6247 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6248 {
6249 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6250 VOIDmode, EXPAND_NORMAL);
6251 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6252
6253 if (value != const1_rtx)
6254 {
6255 error ("%<__builtin_longjmp%> second argument must be 1");
6256 return const0_rtx;
6257 }
6258
6259 expand_builtin_longjmp (buf_addr, value);
6260 return const0_rtx;
6261 }
6262 break;
6263
6264 case BUILT_IN_NONLOCAL_GOTO:
6265 target = expand_builtin_nonlocal_goto (exp);
6266 if (target)
6267 return target;
6268 break;
6269
6270 /* This updates the setjmp buffer that is its argument with the value
6271 of the current stack pointer. */
6272 case BUILT_IN_UPDATE_SETJMP_BUF:
6273 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6274 {
6275 rtx buf_addr
6276 = expand_normal (CALL_EXPR_ARG (exp, 0));
6277
6278 expand_builtin_update_setjmp_buf (buf_addr);
6279 return const0_rtx;
6280 }
6281 break;
6282
6283 case BUILT_IN_TRAP:
6284 expand_builtin_trap ();
6285 return const0_rtx;
6286
6287 case BUILT_IN_UNREACHABLE:
6288 expand_builtin_unreachable ();
6289 return const0_rtx;
6290
6291 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6292 case BUILT_IN_SIGNBITD32:
6293 case BUILT_IN_SIGNBITD64:
6294 case BUILT_IN_SIGNBITD128:
6295 target = expand_builtin_signbit (exp, target);
6296 if (target)
6297 return target;
6298 break;
6299
6300 /* Various hooks for the DWARF 2 __throw routine. */
6301 case BUILT_IN_UNWIND_INIT:
6302 expand_builtin_unwind_init ();
6303 return const0_rtx;
6304 case BUILT_IN_DWARF_CFA:
6305 return virtual_cfa_rtx;
6306 #ifdef DWARF2_UNWIND_INFO
6307 case BUILT_IN_DWARF_SP_COLUMN:
6308 return expand_builtin_dwarf_sp_column ();
6309 case BUILT_IN_INIT_DWARF_REG_SIZES:
6310 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6311 return const0_rtx;
6312 #endif
6313 case BUILT_IN_FROB_RETURN_ADDR:
6314 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6315 case BUILT_IN_EXTRACT_RETURN_ADDR:
6316 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6317 case BUILT_IN_EH_RETURN:
6318 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6319 CALL_EXPR_ARG (exp, 1));
6320 return const0_rtx;
6321 #ifdef EH_RETURN_DATA_REGNO
6322 case BUILT_IN_EH_RETURN_DATA_REGNO:
6323 return expand_builtin_eh_return_data_regno (exp);
6324 #endif
6325 case BUILT_IN_EXTEND_POINTER:
6326 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6327 case BUILT_IN_EH_POINTER:
6328 return expand_builtin_eh_pointer (exp);
6329 case BUILT_IN_EH_FILTER:
6330 return expand_builtin_eh_filter (exp);
6331 case BUILT_IN_EH_COPY_VALUES:
6332 return expand_builtin_eh_copy_values (exp);
6333
6334 case BUILT_IN_VA_START:
6335 return expand_builtin_va_start (exp);
6336 case BUILT_IN_VA_END:
6337 return expand_builtin_va_end (exp);
6338 case BUILT_IN_VA_COPY:
6339 return expand_builtin_va_copy (exp);
6340 case BUILT_IN_EXPECT:
6341 return expand_builtin_expect (exp, target);
6342 case BUILT_IN_ASSUME_ALIGNED:
6343 return expand_builtin_assume_aligned (exp, target);
6344 case BUILT_IN_PREFETCH:
6345 expand_builtin_prefetch (exp);
6346 return const0_rtx;
6347
6348 case BUILT_IN_INIT_TRAMPOLINE:
6349 return expand_builtin_init_trampoline (exp, true);
6350 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6351 return expand_builtin_init_trampoline (exp, false);
6352 case BUILT_IN_ADJUST_TRAMPOLINE:
6353 return expand_builtin_adjust_trampoline (exp);
6354
6355 case BUILT_IN_FORK:
6356 case BUILT_IN_EXECL:
6357 case BUILT_IN_EXECV:
6358 case BUILT_IN_EXECLP:
6359 case BUILT_IN_EXECLE:
6360 case BUILT_IN_EXECVP:
6361 case BUILT_IN_EXECVE:
6362 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6363 if (target)
6364 return target;
6365 break;
6366
6367 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6368 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6369 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6370 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6371 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6372 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6373 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6374 if (target)
6375 return target;
6376 break;
6377
6378 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6379 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6380 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6381 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6382 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6383 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6384 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6385 if (target)
6386 return target;
6387 break;
6388
6389 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6390 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6391 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6392 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6393 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6394 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6395 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6396 if (target)
6397 return target;
6398 break;
6399
6400 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6401 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6402 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6403 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6404 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6405 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6406 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6407 if (target)
6408 return target;
6409 break;
6410
6411 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6412 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6413 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6414 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6415 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6416 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6417 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6418 if (target)
6419 return target;
6420 break;
6421
6422 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6423 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6424 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6425 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6426 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6427 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6428 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6429 if (target)
6430 return target;
6431 break;
6432
6433 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6434 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6435 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6436 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6437 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6438 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6439 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6440 if (target)
6441 return target;
6442 break;
6443
6444 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6445 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6446 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6447 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6448 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6449 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6450 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6451 if (target)
6452 return target;
6453 break;
6454
6455 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6456 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6457 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6458 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6459 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6460 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6461 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6462 if (target)
6463 return target;
6464 break;
6465
6466 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6467 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6468 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6469 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6470 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6471 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6472 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6473 if (target)
6474 return target;
6475 break;
6476
6477 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6478 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6479 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6480 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6481 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6482 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6483 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6484 if (target)
6485 return target;
6486 break;
6487
6488 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6489 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6490 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6491 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6492 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6493 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6494 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6495 if (target)
6496 return target;
6497 break;
6498
6499 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6500 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6501 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6502 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6503 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6504 if (mode == VOIDmode)
6505 mode = TYPE_MODE (boolean_type_node);
6506 if (!target || !register_operand (target, mode))
6507 target = gen_reg_rtx (mode);
6508
6509 mode = get_builtin_sync_mode
6510 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6511 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6512 if (target)
6513 return target;
6514 break;
6515
6516 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6517 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6518 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6519 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6520 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6521 mode = get_builtin_sync_mode
6522 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6523 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6524 if (target)
6525 return target;
6526 break;
6527
6528 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6529 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6530 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6531 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6532 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6533 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6534 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6535 if (target)
6536 return target;
6537 break;
6538
6539 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6540 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6541 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6542 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6543 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6544 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6545 expand_builtin_sync_lock_release (mode, exp);
6546 return const0_rtx;
6547
6548 case BUILT_IN_SYNC_SYNCHRONIZE:
6549 expand_builtin_sync_synchronize ();
6550 return const0_rtx;
6551
6552 case BUILT_IN_ATOMIC_EXCHANGE_1:
6553 case BUILT_IN_ATOMIC_EXCHANGE_2:
6554 case BUILT_IN_ATOMIC_EXCHANGE_4:
6555 case BUILT_IN_ATOMIC_EXCHANGE_8:
6556 case BUILT_IN_ATOMIC_EXCHANGE_16:
6557 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6558 target = expand_builtin_atomic_exchange (mode, exp, target);
6559 if (target)
6560 return target;
6561 break;
6562
6563 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6564 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6565 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6566 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6567 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6568 {
6569 unsigned int nargs, z;
6570 vec<tree, va_gc> *vec;
6571
6572 mode =
6573 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6574 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6575 if (target)
6576 return target;
6577
6578 /* If this is turned into an external library call, the weak parameter
6579 must be dropped to match the expected parameter list. */
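/* The external _N routines take no weak argument; e.g. the 4-byte
   entry point is roughly (assuming the libatomic signature):
     bool __atomic_compare_exchange_4 (int *mem, int *expect,
                                       int desired, int success,
                                       int failure);
   hence the rebuilt argument list below. */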
6580 nargs = call_expr_nargs (exp);
6581 vec_alloc (vec, nargs - 1);
6582 for (z = 0; z < 3; z++)
6583 vec->quick_push (CALL_EXPR_ARG (exp, z));
6584 /* Skip the boolean weak parameter. */
6585 for (z = 4; z < 6; z++)
6586 vec->quick_push (CALL_EXPR_ARG (exp, z));
6587 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6588 break;
6589 }
6590
6591 case BUILT_IN_ATOMIC_LOAD_1:
6592 case BUILT_IN_ATOMIC_LOAD_2:
6593 case BUILT_IN_ATOMIC_LOAD_4:
6594 case BUILT_IN_ATOMIC_LOAD_8:
6595 case BUILT_IN_ATOMIC_LOAD_16:
6596 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6597 target = expand_builtin_atomic_load (mode, exp, target);
6598 if (target)
6599 return target;
6600 break;
6601
6602 case BUILT_IN_ATOMIC_STORE_1:
6603 case BUILT_IN_ATOMIC_STORE_2:
6604 case BUILT_IN_ATOMIC_STORE_4:
6605 case BUILT_IN_ATOMIC_STORE_8:
6606 case BUILT_IN_ATOMIC_STORE_16:
6607 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6608 target = expand_builtin_atomic_store (mode, exp);
6609 if (target)
6610 return const0_rtx;
6611 break;
6612
6613 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6614 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6615 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6616 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6617 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6618 {
6619 enum built_in_function lib;
6620 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6621 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6622 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6623 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6624 ignore, lib);
6625 if (target)
6626 return target;
6627 break;
6628 }
6629 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6630 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6631 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6632 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6633 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6634 {
6635 enum built_in_function lib;
6636 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6637 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6638 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6639 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6640 ignore, lib);
6641 if (target)
6642 return target;
6643 break;
6644 }
6645 case BUILT_IN_ATOMIC_AND_FETCH_1:
6646 case BUILT_IN_ATOMIC_AND_FETCH_2:
6647 case BUILT_IN_ATOMIC_AND_FETCH_4:
6648 case BUILT_IN_ATOMIC_AND_FETCH_8:
6649 case BUILT_IN_ATOMIC_AND_FETCH_16:
6650 {
6651 enum built_in_function lib;
6652 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6653 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6654 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6655 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6656 ignore, lib);
6657 if (target)
6658 return target;
6659 break;
6660 }
6661 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6662 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6663 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6664 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6665 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6666 {
6667 enum built_in_function lib;
6668 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6669 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6670 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6671 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6672 ignore, lib);
6673 if (target)
6674 return target;
6675 break;
6676 }
6677 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6678 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6679 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6680 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6681 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6682 {
6683 enum built_in_function lib;
6684 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6685 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6686 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6687 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6688 ignore, lib);
6689 if (target)
6690 return target;
6691 break;
6692 }
6693 case BUILT_IN_ATOMIC_OR_FETCH_1:
6694 case BUILT_IN_ATOMIC_OR_FETCH_2:
6695 case BUILT_IN_ATOMIC_OR_FETCH_4:
6696 case BUILT_IN_ATOMIC_OR_FETCH_8:
6697 case BUILT_IN_ATOMIC_OR_FETCH_16:
6698 {
6699 enum built_in_function lib;
6700 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6701 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6702 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6703 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6704 ignore, lib);
6705 if (target)
6706 return target;
6707 break;
6708 }
6709 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6710 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6711 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6712 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6713 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6714 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6715 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6716 ignore, BUILT_IN_NONE);
6717 if (target)
6718 return target;
6719 break;
6720
6721 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6722 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6723 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6724 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6725 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6726 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6727 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6728 ignore, BUILT_IN_NONE);
6729 if (target)
6730 return target;
6731 break;
6732
6733 case BUILT_IN_ATOMIC_FETCH_AND_1:
6734 case BUILT_IN_ATOMIC_FETCH_AND_2:
6735 case BUILT_IN_ATOMIC_FETCH_AND_4:
6736 case BUILT_IN_ATOMIC_FETCH_AND_8:
6737 case BUILT_IN_ATOMIC_FETCH_AND_16:
6738 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6739 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6740 ignore, BUILT_IN_NONE);
6741 if (target)
6742 return target;
6743 break;
6744
6745 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6746 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6747 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6748 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6749 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6750 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6751 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6752 ignore, BUILT_IN_NONE);
6753 if (target)
6754 return target;
6755 break;
6756
6757 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6758 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6759 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6760 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6761 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6762 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6763 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6764 ignore, BUILT_IN_NONE);
6765 if (target)
6766 return target;
6767 break;
6768
6769 case BUILT_IN_ATOMIC_FETCH_OR_1:
6770 case BUILT_IN_ATOMIC_FETCH_OR_2:
6771 case BUILT_IN_ATOMIC_FETCH_OR_4:
6772 case BUILT_IN_ATOMIC_FETCH_OR_8:
6773 case BUILT_IN_ATOMIC_FETCH_OR_16:
6774 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6775 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6776 ignore, BUILT_IN_NONE);
6777 if (target)
6778 return target;
6779 break;
6780
6781 case BUILT_IN_ATOMIC_TEST_AND_SET:
6782 return expand_builtin_atomic_test_and_set (exp, target);
6783
6784 case BUILT_IN_ATOMIC_CLEAR:
6785 return expand_builtin_atomic_clear (exp);
6786
6787 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6788 return expand_builtin_atomic_always_lock_free (exp);
6789
6790 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6791 target = expand_builtin_atomic_is_lock_free (exp);
6792 if (target)
6793 return target;
6794 break;
6795
6796 case BUILT_IN_ATOMIC_THREAD_FENCE:
6797 expand_builtin_atomic_thread_fence (exp);
6798 return const0_rtx;
6799
6800 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6801 expand_builtin_atomic_signal_fence (exp);
6802 return const0_rtx;
6803
6804 case BUILT_IN_OBJECT_SIZE:
6805 return expand_builtin_object_size (exp);
6806
6807 case BUILT_IN_MEMCPY_CHK:
6808 case BUILT_IN_MEMPCPY_CHK:
6809 case BUILT_IN_MEMMOVE_CHK:
6810 case BUILT_IN_MEMSET_CHK:
6811 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6812 if (target)
6813 return target;
6814 break;
6815
6816 case BUILT_IN_STRCPY_CHK:
6817 case BUILT_IN_STPCPY_CHK:
6818 case BUILT_IN_STRNCPY_CHK:
6819 case BUILT_IN_STPNCPY_CHK:
6820 case BUILT_IN_STRCAT_CHK:
6821 case BUILT_IN_STRNCAT_CHK:
6822 case BUILT_IN_SNPRINTF_CHK:
6823 case BUILT_IN_VSNPRINTF_CHK:
6824 maybe_emit_chk_warning (exp, fcode);
6825 break;
6826
6827 case BUILT_IN_SPRINTF_CHK:
6828 case BUILT_IN_VSPRINTF_CHK:
6829 maybe_emit_sprintf_chk_warning (exp, fcode);
6830 break;
6831
6832 case BUILT_IN_FREE:
6833 if (warn_free_nonheap_object)
6834 maybe_emit_free_warning (exp);
6835 break;
6836
6837 case BUILT_IN_THREAD_POINTER:
6838 return expand_builtin_thread_pointer (exp, target);
6839
6840 case BUILT_IN_SET_THREAD_POINTER:
6841 expand_builtin_set_thread_pointer (exp);
6842 return const0_rtx;
6843
6844 case BUILT_IN_CILK_DETACH:
6845 expand_builtin_cilk_detach (exp);
6846 return const0_rtx;
6847
6848 case BUILT_IN_CILK_POP_FRAME:
6849 expand_builtin_cilk_pop_frame (exp);
6850 return const0_rtx;
6851
6852     default:	/* Just do a library call for an unknown builtin.  */
6853 break;
6854 }
6855
6856 /* The switch statement above can drop through to cause the function
6857 to be called normally. */
6858 return expand_call (exp, target, ignore);
6859 }
6860
6861 /* Determine whether a tree node represents a call to a built-in
6862 function. If the tree T is a call to a built-in function with
6863 the right number of arguments of the appropriate types, return
6864 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6865 Otherwise the return value is END_BUILTINS. */
6866
6867 enum built_in_function
6868 builtin_mathfn_code (const_tree t)
6869 {
6870 const_tree fndecl, arg, parmlist;
6871 const_tree argtype, parmtype;
6872 const_call_expr_arg_iterator iter;
6873
6874 if (TREE_CODE (t) != CALL_EXPR
6875 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6876 return END_BUILTINS;
6877
6878 fndecl = get_callee_fndecl (t);
6879 if (fndecl == NULL_TREE
6880 || TREE_CODE (fndecl) != FUNCTION_DECL
6881 || ! DECL_BUILT_IN (fndecl)
6882 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6883 return END_BUILTINS;
6884
6885 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6886 init_const_call_expr_arg_iterator (t, &iter);
6887 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6888 {
6889 /* If a function doesn't take a variable number of arguments,
6890 the last element in the list will have type `void'. */
6891 parmtype = TREE_VALUE (parmlist);
6892 if (VOID_TYPE_P (parmtype))
6893 {
6894 if (more_const_call_expr_args_p (&iter))
6895 return END_BUILTINS;
6896 return DECL_FUNCTION_CODE (fndecl);
6897 }
6898
6899 if (! more_const_call_expr_args_p (&iter))
6900 return END_BUILTINS;
6901
6902 arg = next_const_call_expr_arg (&iter);
6903 argtype = TREE_TYPE (arg);
6904
6905 if (SCALAR_FLOAT_TYPE_P (parmtype))
6906 {
6907 if (! SCALAR_FLOAT_TYPE_P (argtype))
6908 return END_BUILTINS;
6909 }
6910 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6911 {
6912 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6913 return END_BUILTINS;
6914 }
6915 else if (POINTER_TYPE_P (parmtype))
6916 {
6917 if (! POINTER_TYPE_P (argtype))
6918 return END_BUILTINS;
6919 }
6920 else if (INTEGRAL_TYPE_P (parmtype))
6921 {
6922 if (! INTEGRAL_TYPE_P (argtype))
6923 return END_BUILTINS;
6924 }
6925 else
6926 return END_BUILTINS;
6927 }
6928
6929 /* Variable-length argument list. */
6930 return DECL_FUNCTION_CODE (fndecl);
6931 }
6932
6933 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6934 evaluate to a constant. */
6935
6936 static tree
6937 fold_builtin_constant_p (tree arg)
6938 {
6939 /* We return 1 for a numeric type that's known to be a constant
6940 value at compile-time or for an aggregate type that's a
6941 literal constant. */
6942 STRIP_NOPS (arg);
6943
6944   /* If we know this is a constant, return the constant one.  */
6945 if (CONSTANT_CLASS_P (arg)
6946 || (TREE_CODE (arg) == CONSTRUCTOR
6947 && TREE_CONSTANT (arg)))
6948 return integer_one_node;
6949 if (TREE_CODE (arg) == ADDR_EXPR)
6950 {
6951 tree op = TREE_OPERAND (arg, 0);
6952 if (TREE_CODE (op) == STRING_CST
6953 || (TREE_CODE (op) == ARRAY_REF
6954 && integer_zerop (TREE_OPERAND (op, 1))
6955 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6956 return integer_one_node;
6957 }
6958
6959 /* If this expression has side effects, show we don't know it to be a
6960      constant.  Likewise if it's a pointer or aggregate type, since in
6961      those cases we only want literals; those are only optimized
6962      when generating RTL, not later.
6963 And finally, if we are compiling an initializer, not code, we
6964 need to return a definite result now; there's not going to be any
6965 more optimization done. */
6966 if (TREE_SIDE_EFFECTS (arg)
6967 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
6968 || POINTER_TYPE_P (TREE_TYPE (arg))
6969 || cfun == 0
6970 || folding_initializer
6971 || force_folding_builtin_constant_p)
6972 return integer_zero_node;
6973
6974 return NULL_TREE;
6975 }
6976
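/* Illustrative behavior of the fold above, roughly:

     __builtin_constant_p (42)      -> 1   (CONSTANT_CLASS_P)
     __builtin_constant_p ("abc")   -> 1   (ADDR_EXPR of a STRING_CST)
     __builtin_constant_p (ptr)     -> 0   (pointer type: only literals count)
     __builtin_constant_p (i + 1)   -> NULL_TREE, i.e. deferred so that a
                                       later pass may still prove constancy

   except that when folding an initializer a definite 0 is returned
   immediately, since no further optimization will happen.  */
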
6977 /* Create builtin_expect with PRED and EXPECTED as its arguments and
6978 return it as a truthvalue. */
6979
6980 static tree
6981 build_builtin_expect_predicate (location_t loc, tree pred, tree expected)
6982 {
6983 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
6984
6985 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
6986 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
6987 ret_type = TREE_TYPE (TREE_TYPE (fn));
6988 pred_type = TREE_VALUE (arg_types);
6989 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
6990
6991 pred = fold_convert_loc (loc, pred_type, pred);
6992 expected = fold_convert_loc (loc, expected_type, expected);
6993 call_expr = build_call_expr_loc (loc, fn, 2, pred, expected);
6994
6995 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
6996 build_int_cst (ret_type, 0));
6997 }
6998
6999 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7000 NULL_TREE if no simplification is possible. */
7001
7002 static tree
7003 fold_builtin_expect (location_t loc, tree arg0, tree arg1)
7004 {
7005 tree inner, fndecl, inner_arg0;
7006 enum tree_code code;
7007
7008 /* Distribute the expected value over short-circuiting operators.
7009 See through the cast from truthvalue_type_node to long. */
7010 inner_arg0 = arg0;
7011 while (TREE_CODE (inner_arg0) == NOP_EXPR
7012 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7013 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7014 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7015
7016 /* If this is a builtin_expect within a builtin_expect keep the
7017 inner one. See through a comparison against a constant. It
7018    might have been added to create a truthvalue.  */
7019 inner = inner_arg0;
7020
7021 if (COMPARISON_CLASS_P (inner)
7022 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7023 inner = TREE_OPERAND (inner, 0);
7024
7025 if (TREE_CODE (inner) == CALL_EXPR
7026 && (fndecl = get_callee_fndecl (inner))
7027 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7028 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7029 return arg0;
7030
7031 inner = inner_arg0;
7032 code = TREE_CODE (inner);
7033 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7034 {
7035 tree op0 = TREE_OPERAND (inner, 0);
7036 tree op1 = TREE_OPERAND (inner, 1);
7037
7038 op0 = build_builtin_expect_predicate (loc, op0, arg1);
7039 op1 = build_builtin_expect_predicate (loc, op1, arg1);
7040 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7041
7042 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7043 }
7044
7045 /* If the argument isn't invariant then there's nothing else we can do. */
7046 if (!TREE_CONSTANT (inner_arg0))
7047 return NULL_TREE;
7048
7049 /* If we expect that a comparison against the argument will fold to
7050 a constant return the constant. In practice, this means a true
7051 constant or the address of a non-weak symbol. */
7052 inner = inner_arg0;
7053 STRIP_NOPS (inner);
7054 if (TREE_CODE (inner) == ADDR_EXPR)
7055 {
7056 do
7057 {
7058 inner = TREE_OPERAND (inner, 0);
7059 }
7060 while (TREE_CODE (inner) == COMPONENT_REF
7061 || TREE_CODE (inner) == ARRAY_REF);
7062 if ((TREE_CODE (inner) == VAR_DECL
7063 || TREE_CODE (inner) == FUNCTION_DECL)
7064 && DECL_WEAK (inner))
7065 return NULL_TREE;
7066 }
7067
7068 /* Otherwise, ARG0 already has the proper type for the return value. */
7069 return arg0;
7070 }
7071
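/* For example, distributing the expectation over a short-circuit
   operator rewrites

     __builtin_expect (a && b, 1)

   into roughly

     (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)

   via build_builtin_expect_predicate, so each branch carries its own
   prediction.  */
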
7072 /* Fold a call to __builtin_classify_type with argument ARG. */
7073
7074 static tree
7075 fold_builtin_classify_type (tree arg)
7076 {
7077 if (arg == 0)
7078 return build_int_cst (integer_type_node, no_type_class);
7079
7080 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7081 }
7082
7083 /* Fold a call to __builtin_strlen with argument ARG. */
7084
7085 static tree
7086 fold_builtin_strlen (location_t loc, tree type, tree arg)
7087 {
7088 if (!validate_arg (arg, POINTER_TYPE))
7089 return NULL_TREE;
7090 else
7091 {
7092 tree len = c_strlen (arg, 0);
7093
7094 if (len)
7095 return fold_convert_loc (loc, type, len);
7096
7097 return NULL_TREE;
7098 }
7099 }
7100
7101 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7102
7103 static tree
7104 fold_builtin_inf (location_t loc, tree type, int warn)
7105 {
7106 REAL_VALUE_TYPE real;
7107
7108 /* __builtin_inff is intended to be usable to define INFINITY on all
7109 targets. If an infinity is not available, INFINITY expands "to a
7110 positive constant of type float that overflows at translation
7111 time", footnote "In this case, using INFINITY will violate the
7112 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7113 Thus we pedwarn to ensure this constraint violation is
7114 diagnosed. */
7115 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7116 pedwarn (loc, 0, "target format does not support infinity");
7117
7118 real_inf (&real);
7119 return build_real (type, real);
7120 }
7121
7122 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7123
7124 static tree
7125 fold_builtin_nan (tree arg, tree type, int quiet)
7126 {
7127 REAL_VALUE_TYPE real;
7128 const char *str;
7129
7130 if (!validate_arg (arg, POINTER_TYPE))
7131 return NULL_TREE;
7132 str = c_getstr (arg);
7133 if (!str)
7134 return NULL_TREE;
7135
7136 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7137 return NULL_TREE;
7138
7139 return build_real (type, real);
7140 }
7141
7142 /* Return true if the floating point expression T has an integer value.
7143 We also allow +Inf, -Inf and NaN to be considered integer values. */
7144
7145 static bool
7146 integer_valued_real_p (tree t)
7147 {
7148 switch (TREE_CODE (t))
7149 {
7150 case FLOAT_EXPR:
7151 return true;
7152
7153 case ABS_EXPR:
7154 case SAVE_EXPR:
7155 return integer_valued_real_p (TREE_OPERAND (t, 0));
7156
7157 case COMPOUND_EXPR:
7158 case MODIFY_EXPR:
7159 case BIND_EXPR:
7160 return integer_valued_real_p (TREE_OPERAND (t, 1));
7161
7162 case PLUS_EXPR:
7163 case MINUS_EXPR:
7164 case MULT_EXPR:
7165 case MIN_EXPR:
7166 case MAX_EXPR:
7167 return integer_valued_real_p (TREE_OPERAND (t, 0))
7168 && integer_valued_real_p (TREE_OPERAND (t, 1));
7169
7170 case COND_EXPR:
7171 return integer_valued_real_p (TREE_OPERAND (t, 1))
7172 && integer_valued_real_p (TREE_OPERAND (t, 2));
7173
7174 case REAL_CST:
7175 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7176
7177 case NOP_EXPR:
7178 {
7179 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7180 if (TREE_CODE (type) == INTEGER_TYPE)
7181 return true;
7182 if (TREE_CODE (type) == REAL_TYPE)
7183 return integer_valued_real_p (TREE_OPERAND (t, 0));
7184 break;
7185 }
7186
7187 case CALL_EXPR:
7188 switch (builtin_mathfn_code (t))
7189 {
7190 CASE_FLT_FN (BUILT_IN_CEIL):
7191 CASE_FLT_FN (BUILT_IN_FLOOR):
7192 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7193 CASE_FLT_FN (BUILT_IN_RINT):
7194 CASE_FLT_FN (BUILT_IN_ROUND):
7195 CASE_FLT_FN (BUILT_IN_TRUNC):
7196 return true;
7197
7198 CASE_FLT_FN (BUILT_IN_FMIN):
7199 CASE_FLT_FN (BUILT_IN_FMAX):
7200 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7201 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7202
7203 default:
7204 break;
7205 }
7206 break;
7207
7208 default:
7209 break;
7210 }
7211 return false;
7212 }
7213
7214 /* FNDECL is assumed to be a builtin where truncation can be propagated
7215    across (for instance floor((double)f) == (double)floorf (f)).
7216 Do the transformation for a call with argument ARG. */
7217
7218 static tree
7219 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
7220 {
7221 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7222
7223 if (!validate_arg (arg, REAL_TYPE))
7224 return NULL_TREE;
7225
7226 /* Integer rounding functions are idempotent. */
7227 if (fcode == builtin_mathfn_code (arg))
7228 return arg;
7229
7230 /* If argument is already integer valued, and we don't need to worry
7231 about setting errno, there's no need to perform rounding. */
7232 if (! flag_errno_math && integer_valued_real_p (arg))
7233 return arg;
7234
7235 if (optimize)
7236 {
7237 tree arg0 = strip_float_extensions (arg);
7238 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7239 tree newtype = TREE_TYPE (arg0);
7240 tree decl;
7241
7242 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7243 && (decl = mathfn_built_in (newtype, fcode)))
7244 return fold_convert_loc (loc, ftype,
7245 build_call_expr_loc (loc, decl, 1,
7246 fold_convert_loc (loc,
7247 newtype,
7248 arg0)));
7249 }
7250 return NULL_TREE;
7251 }
7252
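/* As a sketch, given 'float f' the call

     floor ((double) f)

   is folded to

     (double) floorf (f)

   since widening f to double cannot change which integer the rounding
   function produces.  */
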
7253 /* FNDECL is assumed to be builtin which can narrow the FP type of
7254 the argument, for instance lround((double)f) -> lroundf (f).
7255 Do the transformation for a call with argument ARG. */
7256
7257 static tree
7258 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
7259 {
7260 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7261
7262 if (!validate_arg (arg, REAL_TYPE))
7263 return NULL_TREE;
7264
7265 /* If argument is already integer valued, and we don't need to worry
7266 about setting errno, there's no need to perform rounding. */
7267 if (! flag_errno_math && integer_valued_real_p (arg))
7268 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7269 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7270
7271 if (optimize)
7272 {
7273 tree ftype = TREE_TYPE (arg);
7274 tree arg0 = strip_float_extensions (arg);
7275 tree newtype = TREE_TYPE (arg0);
7276 tree decl;
7277
7278 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7279 && (decl = mathfn_built_in (newtype, fcode)))
7280 return build_call_expr_loc (loc, decl, 1,
7281 fold_convert_loc (loc, newtype, arg0));
7282 }
7283
7284 /* Canonicalize iround (x) to lround (x) on ILP32 targets where
7285 sizeof (int) == sizeof (long). */
7286 if (TYPE_PRECISION (integer_type_node)
7287 == TYPE_PRECISION (long_integer_type_node))
7288 {
7289 tree newfn = NULL_TREE;
7290 switch (fcode)
7291 {
7292 CASE_FLT_FN (BUILT_IN_ICEIL):
7293 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7294 break;
7295
7296 CASE_FLT_FN (BUILT_IN_IFLOOR):
7297 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7298 break;
7299
7300 CASE_FLT_FN (BUILT_IN_IROUND):
7301 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7302 break;
7303
7304 CASE_FLT_FN (BUILT_IN_IRINT):
7305 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7306 break;
7307
7308 default:
7309 break;
7310 }
7311
7312 if (newfn)
7313 {
7314 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7315 return fold_convert_loc (loc,
7316 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7317 }
7318 }
7319
7320 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7321 sizeof (long long) == sizeof (long). */
7322 if (TYPE_PRECISION (long_long_integer_type_node)
7323 == TYPE_PRECISION (long_integer_type_node))
7324 {
7325 tree newfn = NULL_TREE;
7326 switch (fcode)
7327 {
7328 CASE_FLT_FN (BUILT_IN_LLCEIL):
7329 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7330 break;
7331
7332 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7333 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7334 break;
7335
7336 CASE_FLT_FN (BUILT_IN_LLROUND):
7337 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7338 break;
7339
7340 CASE_FLT_FN (BUILT_IN_LLRINT):
7341 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7342 break;
7343
7344 default:
7345 break;
7346 }
7347
7348 if (newfn)
7349 {
7350 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7351 return fold_convert_loc (loc,
7352 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7353 }
7354 }
7355
7356 return NULL_TREE;
7357 }
7358
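/* Rough examples: with 'float f', and on an LP64 target where long
   and long long have the same precision,

     lround ((double) f)  -> lroundf (f)              (narrowing)
     llround (x)          -> (long long) lround (x)   (canonicalization)

   and with -fno-math-errno an argument already known to be integer
   valued folds directly to a FIX_TRUNC_EXPR.  */
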
7359 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7360 return type. Return NULL_TREE if no simplification can be made. */
7361
7362 static tree
7363 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
7364 {
7365 tree res;
7366
7367 if (!validate_arg (arg, COMPLEX_TYPE)
7368 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7369 return NULL_TREE;
7370
7371 /* Calculate the result when the argument is a constant. */
7372 if (TREE_CODE (arg) == COMPLEX_CST
7373 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7374 type, mpfr_hypot)))
7375 return res;
7376
7377 if (TREE_CODE (arg) == COMPLEX_EXPR)
7378 {
7379 tree real = TREE_OPERAND (arg, 0);
7380 tree imag = TREE_OPERAND (arg, 1);
7381
7382 /* If either part is zero, cabs is fabs of the other. */
7383 if (real_zerop (real))
7384 return fold_build1_loc (loc, ABS_EXPR, type, imag);
7385 if (real_zerop (imag))
7386 return fold_build1_loc (loc, ABS_EXPR, type, real);
7387
7388 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7389 if (flag_unsafe_math_optimizations
7390 && operand_equal_p (real, imag, OEP_PURE_SAME))
7391 {
7392 const REAL_VALUE_TYPE sqrt2_trunc
7393 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7394 STRIP_NOPS (real);
7395 return fold_build2_loc (loc, MULT_EXPR, type,
7396 fold_build1_loc (loc, ABS_EXPR, type, real),
7397 build_real (type, sqrt2_trunc));
7398 }
7399 }
7400
7401 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7402 if (TREE_CODE (arg) == NEGATE_EXPR
7403 || TREE_CODE (arg) == CONJ_EXPR)
7404 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
7405
7406 /* Don't do this when optimizing for size. */
7407 if (flag_unsafe_math_optimizations
7408 && optimize && optimize_function_for_speed_p (cfun))
7409 {
7410 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7411
7412 if (sqrtfn != NULL_TREE)
7413 {
7414 tree rpart, ipart, result;
7415
7416 arg = builtin_save_expr (arg);
7417
7418 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
7419 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
7420
7421 rpart = builtin_save_expr (rpart);
7422 ipart = builtin_save_expr (ipart);
7423
7424 result = fold_build2_loc (loc, PLUS_EXPR, type,
7425 fold_build2_loc (loc, MULT_EXPR, type,
7426 rpart, rpart),
7427 fold_build2_loc (loc, MULT_EXPR, type,
7428 ipart, ipart));
7429
7430 return build_call_expr_loc (loc, sqrtfn, 1, result);
7431 }
7432 }
7433
7434 return NULL_TREE;
7435 }
7436
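/* Illustrative expansions, for complex double z = x + y*I:

     cabs (x + 0i)  -> fabs (x)
     cabs (x + xi)  -> fabs (x) * sqrt (2)   (unsafe math only)
     cabs (z)       -> sqrt (r*r + i*i)      (unsafe math, optimizing
                                              for speed)

   where r and i are SAVE_EXPRs of the real and imaginary parts so the
   argument is evaluated only once.  */
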
7437 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7438 complex tree type of the result. If NEG is true, the imaginary
7439 zero is negative. */
7440
7441 static tree
7442 build_complex_cproj (tree type, bool neg)
7443 {
7444 REAL_VALUE_TYPE rinf, rzero = dconst0;
7445
7446 real_inf (&rinf);
7447 rzero.sign = neg;
7448 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7449 build_real (TREE_TYPE (type), rzero));
7450 }
7451
7452 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7453 return type. Return NULL_TREE if no simplification can be made. */
7454
7455 static tree
7456 fold_builtin_cproj (location_t loc, tree arg, tree type)
7457 {
7458 if (!validate_arg (arg, COMPLEX_TYPE)
7459 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7460 return NULL_TREE;
7461
7462 /* If there are no infinities, return arg. */
7463 if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type))))
7464 return non_lvalue_loc (loc, arg);
7465
7466 /* Calculate the result when the argument is a constant. */
7467 if (TREE_CODE (arg) == COMPLEX_CST)
7468 {
7469 const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
7470 const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
7471
7472 if (real_isinf (real) || real_isinf (imag))
7473 return build_complex_cproj (type, imag->sign);
7474 else
7475 return arg;
7476 }
7477 else if (TREE_CODE (arg) == COMPLEX_EXPR)
7478 {
7479 tree real = TREE_OPERAND (arg, 0);
7480 tree imag = TREE_OPERAND (arg, 1);
7481
7482 STRIP_NOPS (real);
7483 STRIP_NOPS (imag);
7484
7485 /* If the real part is inf and the imag part is known to be
7486 nonnegative, return (inf + 0i). Remember side-effects are
7487 possible in the imag part. */
7488 if (TREE_CODE (real) == REAL_CST
7489 && real_isinf (TREE_REAL_CST_PTR (real))
7490 && tree_expr_nonnegative_p (imag))
7491 return omit_one_operand_loc (loc, type,
7492 build_complex_cproj (type, false),
7493 arg);
7494
7495 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
7496 Remember side-effects are possible in the real part. */
7497 if (TREE_CODE (imag) == REAL_CST
7498 && real_isinf (TREE_REAL_CST_PTR (imag)))
7499 return
7500 omit_one_operand_loc (loc, type,
7501 build_complex_cproj (type, TREE_REAL_CST_PTR
7502 (imag)->sign), arg);
7503 }
7504
7505 return NULL_TREE;
7506 }
7507
7508 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7509 Return NULL_TREE if no simplification can be made. */
7510
7511 static tree
7512 fold_builtin_sqrt (location_t loc, tree arg, tree type)
7513 {
7515 enum built_in_function fcode;
7516 tree res;
7517
7518 if (!validate_arg (arg, REAL_TYPE))
7519 return NULL_TREE;
7520
7521 /* Calculate the result when the argument is a constant. */
7522 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7523 return res;
7524
7525 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7526 fcode = builtin_mathfn_code (arg);
7527 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7528 {
7529 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7530 arg = fold_build2_loc (loc, MULT_EXPR, type,
7531 CALL_EXPR_ARG (arg, 0),
7532 build_real (type, dconsthalf));
7533 return build_call_expr_loc (loc, expfn, 1, arg);
7534 }
7535
7536 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7537 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7538 {
7539 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7540
7541 if (powfn)
7542 {
7543 tree arg0 = CALL_EXPR_ARG (arg, 0);
7544 tree tree_root;
7545 /* The inner root was either sqrt or cbrt. */
7546 /* This was a conditional expression but it triggered a bug
7547 in Sun C 5.5. */
7548 REAL_VALUE_TYPE dconstroot;
7549 if (BUILTIN_SQRT_P (fcode))
7550 dconstroot = dconsthalf;
7551 else
7552 dconstroot = dconst_third ();
7553
7554 /* Adjust for the outer root. */
7555 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7556 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7557 tree_root = build_real (type, dconstroot);
7558 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7559 }
7560 }
7561
7562 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7563 if (flag_unsafe_math_optimizations
7564 && (fcode == BUILT_IN_POW
7565 || fcode == BUILT_IN_POWF
7566 || fcode == BUILT_IN_POWL))
7567 {
7568 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7569 tree arg0 = CALL_EXPR_ARG (arg, 0);
7570 tree arg1 = CALL_EXPR_ARG (arg, 1);
7571 tree narg1;
7572 if (!tree_expr_nonnegative_p (arg0))
7573 arg0 = build1 (ABS_EXPR, type, arg0);
7574 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7575 build_real (type, dconsthalf));
7576 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7577 }
7578
7579 return NULL_TREE;
7580 }
7581
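/* Examples of the unsafe-math rewrites above:

     sqrt (exp (x))     -> exp (x * 0.5)
     sqrt (sqrt (x))    -> pow (x, 0.25)
     sqrt (pow (x, y))  -> pow (fabs (x), y * 0.5)

   The fabs in the last case keeps the result well defined when x is
   negative and y is an even integer.  */
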
7582 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7583 Return NULL_TREE if no simplification can be made. */
7584
7585 static tree
7586 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7587 {
7588 const enum built_in_function fcode = builtin_mathfn_code (arg);
7589 tree res;
7590
7591 if (!validate_arg (arg, REAL_TYPE))
7592 return NULL_TREE;
7593
7594 /* Calculate the result when the argument is a constant. */
7595 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7596 return res;
7597
7598 if (flag_unsafe_math_optimizations)
7599 {
7600 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7601 if (BUILTIN_EXPONENT_P (fcode))
7602 {
7603 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7604 const REAL_VALUE_TYPE third_trunc =
7605 real_value_truncate (TYPE_MODE (type), dconst_third ());
7606 arg = fold_build2_loc (loc, MULT_EXPR, type,
7607 CALL_EXPR_ARG (arg, 0),
7608 build_real (type, third_trunc));
7609 return build_call_expr_loc (loc, expfn, 1, arg);
7610 }
7611
7612 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7613 if (BUILTIN_SQRT_P (fcode))
7614 {
7615 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7616
7617 if (powfn)
7618 {
7619 tree arg0 = CALL_EXPR_ARG (arg, 0);
7620 tree tree_root;
7621 REAL_VALUE_TYPE dconstroot = dconst_third ();
7622
7623 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7624 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7625 tree_root = build_real (type, dconstroot);
7626 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7627 }
7628 }
7629
7630 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7631 if (BUILTIN_CBRT_P (fcode))
7632 {
7633 tree arg0 = CALL_EXPR_ARG (arg, 0);
7634 if (tree_expr_nonnegative_p (arg0))
7635 {
7636 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7637
7638 if (powfn)
7639 {
7640 tree tree_root;
7641 REAL_VALUE_TYPE dconstroot;
7642
7643 real_arithmetic (&dconstroot, MULT_EXPR,
7644 dconst_third_ptr (), dconst_third_ptr ());
7645 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7646 tree_root = build_real (type, dconstroot);
7647 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7648 }
7649 }
7650 }
7651
7652 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7653 if (fcode == BUILT_IN_POW
7654 || fcode == BUILT_IN_POWF
7655 || fcode == BUILT_IN_POWL)
7656 {
7657 tree arg00 = CALL_EXPR_ARG (arg, 0);
7658 tree arg01 = CALL_EXPR_ARG (arg, 1);
7659 if (tree_expr_nonnegative_p (arg00))
7660 {
7661 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7662 const REAL_VALUE_TYPE dconstroot
7663 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7664 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7665 build_real (type, dconstroot));
7666 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7667 }
7668 }
7669 }
7670 return NULL_TREE;
7671 }
7672
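/* Corresponding examples for cbrt (all under unsafe math):

     cbrt (exp (x))     -> exp (x / 3)
     cbrt (sqrt (x))    -> pow (x, 1.0/6.0)
     cbrt (cbrt (x))    -> pow (x, 1.0/9.0)   (iff x is nonnegative)
     cbrt (pow (x, y))  -> pow (x, y / 3)     (iff x is nonnegative)  */
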
7673 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7674 TYPE is the type of the return value. Return NULL_TREE if no
7675 simplification can be made. */
7676
7677 static tree
7678 fold_builtin_cos (location_t loc,
7679 tree arg, tree type, tree fndecl)
7680 {
7681 tree res, narg;
7682
7683 if (!validate_arg (arg, REAL_TYPE))
7684 return NULL_TREE;
7685
7686 /* Calculate the result when the argument is a constant. */
7687 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7688 return res;
7689
7690 /* Optimize cos(-x) into cos (x). */
7691 if ((narg = fold_strip_sign_ops (arg)))
7692 return build_call_expr_loc (loc, fndecl, 1, narg);
7693
7694 return NULL_TREE;
7695 }
7696
7697 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7698 Return NULL_TREE if no simplification can be made. */
7699
7700 static tree
7701 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7702 {
7703 if (validate_arg (arg, REAL_TYPE))
7704 {
7705 tree res, narg;
7706
7707 /* Calculate the result when the argument is a constant. */
7708 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7709 return res;
7710
7711 /* Optimize cosh(-x) into cosh (x). */
7712 if ((narg = fold_strip_sign_ops (arg)))
7713 return build_call_expr_loc (loc, fndecl, 1, narg);
7714 }
7715
7716 return NULL_TREE;
7717 }
7718
7719 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7720 argument ARG. TYPE is the type of the return value. Return
7721 NULL_TREE if no simplification can be made. */
7722
7723 static tree
7724 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7725 bool hyper)
7726 {
7727 if (validate_arg (arg, COMPLEX_TYPE)
7728 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7729 {
7730 tree tmp;
7731
7732 /* Calculate the result when the argument is a constant. */
7733 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7734 return tmp;
7735
7736 /* Optimize fn(-x) into fn(x). */
7737 if ((tmp = fold_strip_sign_ops (arg)))
7738 return build_call_expr_loc (loc, fndecl, 1, tmp);
7739 }
7740
7741 return NULL_TREE;
7742 }
7743
7744 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7745 Return NULL_TREE if no simplification can be made. */
7746
7747 static tree
7748 fold_builtin_tan (tree arg, tree type)
7749 {
7750 enum built_in_function fcode;
7751 tree res;
7752
7753 if (!validate_arg (arg, REAL_TYPE))
7754 return NULL_TREE;
7755
7756 /* Calculate the result when the argument is a constant. */
7757 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7758 return res;
7759
7760 /* Optimize tan(atan(x)) = x. */
7761 fcode = builtin_mathfn_code (arg);
7762 if (flag_unsafe_math_optimizations
7763 && (fcode == BUILT_IN_ATAN
7764 || fcode == BUILT_IN_ATANF
7765 || fcode == BUILT_IN_ATANL))
7766 return CALL_EXPR_ARG (arg, 0);
7767
7768 return NULL_TREE;
7769 }
7770
7771 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7772 NULL_TREE if no simplification can be made. */
7773
7774 static tree
7775 fold_builtin_sincos (location_t loc,
7776 tree arg0, tree arg1, tree arg2)
7777 {
7778 tree type;
7779 tree res, fn, call;
7780
7781 if (!validate_arg (arg0, REAL_TYPE)
7782 || !validate_arg (arg1, POINTER_TYPE)
7783 || !validate_arg (arg2, POINTER_TYPE))
7784 return NULL_TREE;
7785
7786 type = TREE_TYPE (arg0);
7787
7788 /* Calculate the result when the argument is a constant. */
7789 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7790 return res;
7791
7792 /* Canonicalize sincos to cexpi. */
7793 if (!targetm.libc_has_function (function_c99_math_complex))
7794 return NULL_TREE;
7795 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7796 if (!fn)
7797 return NULL_TREE;
7798
7799 call = build_call_expr_loc (loc, fn, 1, arg0);
7800 call = builtin_save_expr (call);
7801
7802 return build2 (COMPOUND_EXPR, void_type_node,
7803 build2 (MODIFY_EXPR, void_type_node,
7804 build_fold_indirect_ref_loc (loc, arg1),
7805 build1 (IMAGPART_EXPR, type, call)),
7806 build2 (MODIFY_EXPR, void_type_node,
7807 build_fold_indirect_ref_loc (loc, arg2),
7808 build1 (REALPART_EXPR, type, call)));
7809 }
7810
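/* Sketch of the canonicalization above: when the C library provides
   the C99 complex functions,

     sincos (x, &s, &c);

   becomes, roughly,

     _Complex double t = __builtin_cexpi (x);   (= cos (x) + I*sin (x))
     s = __imag__ t;
     c = __real__ t;

   with the cexpi call wrapped in a SAVE_EXPR so it is evaluated only
   once.  */
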
7811 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7812 NULL_TREE if no simplification can be made. */
7813
7814 static tree
7815 fold_builtin_cexp (location_t loc, tree arg0, tree type)
7816 {
7817 tree rtype;
7818 tree realp, imagp, ifn;
7819 tree res;
7820
7821 if (!validate_arg (arg0, COMPLEX_TYPE)
7822 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
7823 return NULL_TREE;
7824
7825 /* Calculate the result when the argument is a constant. */
7826 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
7827 return res;
7828
7829 rtype = TREE_TYPE (TREE_TYPE (arg0));
7830
7831   /* If we can figure out the real part of arg0 and it is constant zero,
7832      fold to cexpi.  */
7833 if (!targetm.libc_has_function (function_c99_math_complex))
7834 return NULL_TREE;
7835 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7836 if (!ifn)
7837 return NULL_TREE;
7838
7839 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
7840 && real_zerop (realp))
7841 {
7842 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
7843 return build_call_expr_loc (loc, ifn, 1, narg);
7844 }
7845
7846   /* If we can easily decompose the real and imaginary parts, split cexp
7847      into exp (r) * cexpi (i).  */
7848 if (flag_unsafe_math_optimizations
7849 && realp)
7850 {
7851 tree rfn, rcall, icall;
7852
7853 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7854 if (!rfn)
7855 return NULL_TREE;
7856
7857 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
7858 if (!imagp)
7859 return NULL_TREE;
7860
7861 icall = build_call_expr_loc (loc, ifn, 1, imagp);
7862 icall = builtin_save_expr (icall);
7863 rcall = build_call_expr_loc (loc, rfn, 1, realp);
7864 rcall = builtin_save_expr (rcall);
7865 return fold_build2_loc (loc, COMPLEX_EXPR, type,
7866 fold_build2_loc (loc, MULT_EXPR, rtype,
7867 rcall,
7868 fold_build1_loc (loc, REALPART_EXPR,
7869 rtype, icall)),
7870 fold_build2_loc (loc, MULT_EXPR, rtype,
7871 rcall,
7872 fold_build1_loc (loc, IMAGPART_EXPR,
7873 rtype, icall)));
7874 }
7875
7876 return NULL_TREE;
7877 }
7878
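/* Illustration: with z = r + i*I, and under unsafe math when the parts
   can be decomposed,

     cexp (z)  ->  exp (r) * cexpi (i)
               ==  exp (r) * (cos (i) + I*sin (i))

   and when r is known to be constant zero the call reduces directly to
   cexpi (i).  */
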
7879 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7880 Return NULL_TREE if no simplification can be made. */
7881
7882 static tree
7883 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7884 {
7885 if (!validate_arg (arg, REAL_TYPE))
7886 return NULL_TREE;
7887
7888 /* Optimize trunc of constant value. */
7889 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7890 {
7891 REAL_VALUE_TYPE r, x;
7892 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7893
7894 x = TREE_REAL_CST (arg);
7895 real_trunc (&r, TYPE_MODE (type), &x);
7896 return build_real (type, r);
7897 }
7898
7899 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7900 }
7901
7902 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7903 Return NULL_TREE if no simplification can be made. */
7904
7905 static tree
7906 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7907 {
7908 if (!validate_arg (arg, REAL_TYPE))
7909 return NULL_TREE;
7910
7911 /* Optimize floor of constant value. */
7912 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7913 {
7914 REAL_VALUE_TYPE x;
7915
7916 x = TREE_REAL_CST (arg);
7917 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7918 {
7919 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7920 REAL_VALUE_TYPE r;
7921
7922 real_floor (&r, TYPE_MODE (type), &x);
7923 return build_real (type, r);
7924 }
7925 }
7926
7927 /* Fold floor (x) where x is nonnegative to trunc (x). */
7928 if (tree_expr_nonnegative_p (arg))
7929 {
7930 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7931 if (truncfn)
7932 return build_call_expr_loc (loc, truncfn, 1, arg);
7933 }
7934
7935 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7936 }
7937
7938 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7939 Return NULL_TREE if no simplification can be made. */
7940
7941 static tree
7942 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7943 {
7944 if (!validate_arg (arg, REAL_TYPE))
7945 return NULL_TREE;
7946
7947 /* Optimize ceil of constant value. */
7948 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7949 {
7950 REAL_VALUE_TYPE x;
7951
7952 x = TREE_REAL_CST (arg);
7953 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7954 {
7955 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7956 REAL_VALUE_TYPE r;
7957
7958 real_ceil (&r, TYPE_MODE (type), &x);
7959 return build_real (type, r);
7960 }
7961 }
7962
7963 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7964 }
7965
7966 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7967 Return NULL_TREE if no simplification can be made. */
7968
7969 static tree
7970 fold_builtin_round (location_t loc, tree fndecl, tree arg)
7971 {
7972 if (!validate_arg (arg, REAL_TYPE))
7973 return NULL_TREE;
7974
7975 /* Optimize round of constant value. */
7976 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7977 {
7978 REAL_VALUE_TYPE x;
7979
7980 x = TREE_REAL_CST (arg);
7981 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7982 {
7983 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7984 REAL_VALUE_TYPE r;
7985
7986 real_round (&r, TYPE_MODE (type), &x);
7987 return build_real (type, r);
7988 }
7989 }
7990
7991 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7992 }
7993
7994 /* Fold function call to builtin lround, lroundf or lroundl (or the
7995 corresponding long long versions) and other rounding functions. ARG
7996 is the argument to the call. Return NULL_TREE if no simplification
7997 can be made. */
7998
7999 static tree
8000 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
8001 {
8002 if (!validate_arg (arg, REAL_TYPE))
8003 return NULL_TREE;
8004
8005 /* Optimize lround of constant value. */
8006 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8007 {
8008 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
8009
8010 if (real_isfinite (&x))
8011 {
8012 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
8013 tree ftype = TREE_TYPE (arg);
8014 double_int val;
8015 REAL_VALUE_TYPE r;
8016
8017 switch (DECL_FUNCTION_CODE (fndecl))
8018 {
8019 CASE_FLT_FN (BUILT_IN_IFLOOR):
8020 CASE_FLT_FN (BUILT_IN_LFLOOR):
8021 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8022 real_floor (&r, TYPE_MODE (ftype), &x);
8023 break;
8024
8025 CASE_FLT_FN (BUILT_IN_ICEIL):
8026 CASE_FLT_FN (BUILT_IN_LCEIL):
8027 CASE_FLT_FN (BUILT_IN_LLCEIL):
8028 real_ceil (&r, TYPE_MODE (ftype), &x);
8029 break;
8030
8031 CASE_FLT_FN (BUILT_IN_IROUND):
8032 CASE_FLT_FN (BUILT_IN_LROUND):
8033 CASE_FLT_FN (BUILT_IN_LLROUND):
8034 real_round (&r, TYPE_MODE (ftype), &x);
8035 break;
8036
8037 default:
8038 gcc_unreachable ();
8039 }
8040
8041 real_to_integer2 ((HOST_WIDE_INT *)&val.low, &val.high, &r);
8042 if (double_int_fits_to_tree_p (itype, val))
8043 return double_int_to_tree (itype, val);
8044 }
8045 }
8046
8047 switch (DECL_FUNCTION_CODE (fndecl))
8048 {
8049 CASE_FLT_FN (BUILT_IN_LFLOOR):
8050 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8051 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8052 if (tree_expr_nonnegative_p (arg))
8053 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
8054 TREE_TYPE (TREE_TYPE (fndecl)), arg);
8055 break;
8056 default:;
8057 }
8058
8059 return fold_fixed_mathfn (loc, fndecl, arg);
8060 }
8061
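/* Worked examples of the folding above:

     lround (2.5)   -> 3        (real_round, halfway cases away from 0)
     lfloor (x)     -> (long) x          when x is known nonnegative
     llceil (1e30)  -> left alone, since the result does not fit the
                       return type (double_int_fits_to_tree_p fails)  */
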
8062 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8063 and their long and long long variants (i.e. ffsl and ffsll). ARG is
8064 the argument to the call. Return NULL_TREE if no simplification can
8065 be made. */
8066
8067 static tree
8068 fold_builtin_bitop (tree fndecl, tree arg)
8069 {
8070 if (!validate_arg (arg, INTEGER_TYPE))
8071 return NULL_TREE;
8072
8073 /* Optimize for constant argument. */
8074 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8075 {
8076 HOST_WIDE_INT hi, width, result;
8077 unsigned HOST_WIDE_INT lo;
8078 tree type;
8079
8080 type = TREE_TYPE (arg);
8081 width = TYPE_PRECISION (type);
8082 lo = TREE_INT_CST_LOW (arg);
8083
8084 /* Clear all the bits that are beyond the type's precision. */
8085 if (width > HOST_BITS_PER_WIDE_INT)
8086 {
8087 hi = TREE_INT_CST_HIGH (arg);
8088 if (width < HOST_BITS_PER_DOUBLE_INT)
8089 hi &= ~(HOST_WIDE_INT_M1U << (width - HOST_BITS_PER_WIDE_INT));
8090 }
8091 else
8092 {
8093 hi = 0;
8094 if (width < HOST_BITS_PER_WIDE_INT)
8095 lo &= ~(HOST_WIDE_INT_M1U << width);
8096 }
8097
8098 switch (DECL_FUNCTION_CODE (fndecl))
8099 {
8100 CASE_INT_FN (BUILT_IN_FFS):
8101 if (lo != 0)
8102 result = ffs_hwi (lo);
8103 else if (hi != 0)
8104 result = HOST_BITS_PER_WIDE_INT + ffs_hwi (hi);
8105 else
8106 result = 0;
8107 break;
8108
8109 CASE_INT_FN (BUILT_IN_CLZ):
8110 if (hi != 0)
8111 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
8112 else if (lo != 0)
8113 result = width - floor_log2 (lo) - 1;
8114 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8115 result = width;
8116 break;
8117
8118 CASE_INT_FN (BUILT_IN_CTZ):
8119 if (lo != 0)
8120 result = ctz_hwi (lo);
8121 else if (hi != 0)
8122 result = HOST_BITS_PER_WIDE_INT + ctz_hwi (hi);
8123 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8124 result = width;
8125 break;
8126
8127 CASE_INT_FN (BUILT_IN_CLRSB):
8128 if (width > 2 * HOST_BITS_PER_WIDE_INT)
8129 return NULL_TREE;
8130 if (width > HOST_BITS_PER_WIDE_INT
8131 && (hi & ((unsigned HOST_WIDE_INT) 1
8132 << (width - HOST_BITS_PER_WIDE_INT - 1))) != 0)
8133 {
8134 hi = ~hi & ~(HOST_WIDE_INT_M1U
8135 << (width - HOST_BITS_PER_WIDE_INT - 1));
8136 lo = ~lo;
8137 }
8138 else if (width <= HOST_BITS_PER_WIDE_INT
8139 && (lo & ((unsigned HOST_WIDE_INT) 1 << (width - 1))) != 0)
8140 lo = ~lo & ~(HOST_WIDE_INT_M1U << (width - 1));
8141 if (hi != 0)
8142 result = width - floor_log2 (hi) - 2 - HOST_BITS_PER_WIDE_INT;
8143 else if (lo != 0)
8144 result = width - floor_log2 (lo) - 2;
8145 else
8146 result = width - 1;
8147 break;
8148
8149 CASE_INT_FN (BUILT_IN_POPCOUNT):
8150 result = 0;
8151 while (lo)
8152 result++, lo &= lo - 1;
8153 while (hi)
8154 result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
8155 break;
8156
8157 CASE_INT_FN (BUILT_IN_PARITY):
8158 result = 0;
8159 while (lo)
8160 result++, lo &= lo - 1;
8161 while (hi)
8162 result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
8163 result &= 1;
8164 break;
8165
8166 default:
8167 gcc_unreachable ();
8168 }
8169
8170 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8171 }
8172
8173 return NULL_TREE;
8174 }
8175
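/* Examples of the constant folding above, assuming 32-bit int:

     __builtin_ffs (0x10)       -> 5
     __builtin_clz (1u)         -> 31
     __builtin_ctz (8u)         -> 3
     __builtin_popcount (0xff)  -> 8
     __builtin_parity (7u)      -> 1
     __builtin_clrsb (0)        -> 31

   clz/ctz of zero fold to the mode's defined value when
   CLZ/CTZ_DEFINED_VALUE_AT_ZERO provides one, and to the type's
   precision otherwise.  */
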
8176 /* Fold function call to builtin_bswap and the short, long and long long
8177 variants. Return NULL_TREE if no simplification can be made. */
8178 static tree
8179 fold_builtin_bswap (tree fndecl, tree arg)
8180 {
8181 if (! validate_arg (arg, INTEGER_TYPE))
8182 return NULL_TREE;
8183
8184 /* Optimize constant value. */
8185 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8186 {
8187 HOST_WIDE_INT hi, width, r_hi = 0;
8188 unsigned HOST_WIDE_INT lo, r_lo = 0;
8189 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8190
8191 width = TYPE_PRECISION (type);
8192 lo = TREE_INT_CST_LOW (arg);
8193 hi = TREE_INT_CST_HIGH (arg);
8194
8195 switch (DECL_FUNCTION_CODE (fndecl))
8196 {
8197 case BUILT_IN_BSWAP16:
8198 case BUILT_IN_BSWAP32:
8199 case BUILT_IN_BSWAP64:
8200 {
8201 int s;
8202
8203 for (s = 0; s < width; s += 8)
8204 {
8205 int d = width - s - 8;
8206 unsigned HOST_WIDE_INT byte;
8207
8208 if (s < HOST_BITS_PER_WIDE_INT)
8209 byte = (lo >> s) & 0xff;
8210 else
8211 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
8212
8213 if (d < HOST_BITS_PER_WIDE_INT)
8214 r_lo |= byte << d;
8215 else
8216 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
8217 }
8218 }
8219
8220 break;
8221
8222 default:
8223 gcc_unreachable ();
8224 }
8225
8226 if (width < HOST_BITS_PER_WIDE_INT)
8227 return build_int_cst (type, r_lo);
8228 else
8229 return build_int_cst_wide (type, r_lo, r_hi);
8230 }
8231
8232 return NULL_TREE;
8233 }
8234
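/* For instance:

     __builtin_bswap16 (0x1234)     -> 0x3412
     __builtin_bswap32 (0x12345678) -> 0x78563412

   Each byte at bit offset S from the low end moves to offset
   WIDTH - S - 8, assembled through the lo/hi halves of the constant.  */
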
8235 /* A subroutine of fold_builtin to fold the various logarithmic
8236    functions.  Return NULL_TREE if no simplification can be made.
8237 FUNC is the corresponding MPFR logarithm function. */
8238
8239 static tree
8240 fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
8241 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8242 {
8243 if (validate_arg (arg, REAL_TYPE))
8244 {
8245 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8246 tree res;
8247 const enum built_in_function fcode = builtin_mathfn_code (arg);
8248
8249 /* Calculate the result when the argument is a constant. */
8250 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
8251 return res;
8252
8253 /* Special case, optimize logN(expN(x)) = x. */
8254 if (flag_unsafe_math_optimizations
8255 && ((func == mpfr_log
8256 && (fcode == BUILT_IN_EXP
8257 || fcode == BUILT_IN_EXPF
8258 || fcode == BUILT_IN_EXPL))
8259 || (func == mpfr_log2
8260 && (fcode == BUILT_IN_EXP2
8261 || fcode == BUILT_IN_EXP2F
8262 || fcode == BUILT_IN_EXP2L))
8263 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
8264 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8265
8266 /* Optimize logN(func()) for various exponential functions. We
8267 want to determine the value "x" and the power "exponent" in
8268 order to transform logN(x**exponent) into exponent*logN(x). */
8269 if (flag_unsafe_math_optimizations)
8270 {
8271 tree exponent = 0, x = 0;
8272
8273 switch (fcode)
8274 {
8275 CASE_FLT_FN (BUILT_IN_EXP):
8276 	      /* Prepare to do logN(exp(exponent)) -> exponent*logN(e).  */
8277 x = build_real (type, real_value_truncate (TYPE_MODE (type),
8278 dconst_e ()));
8279 exponent = CALL_EXPR_ARG (arg, 0);
8280 break;
8281 CASE_FLT_FN (BUILT_IN_EXP2):
8282 	      /* Prepare to do logN(exp2(exponent)) -> exponent*logN(2).  */
8283 x = build_real (type, dconst2);
8284 exponent = CALL_EXPR_ARG (arg, 0);
8285 break;
8286 CASE_FLT_FN (BUILT_IN_EXP10):
8287 CASE_FLT_FN (BUILT_IN_POW10):
8288 	      /* Prepare to do logN(exp10(exponent)) -> exponent*logN(10).  */
8289 {
8290 REAL_VALUE_TYPE dconst10;
8291 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
8292 x = build_real (type, dconst10);
8293 }
8294 exponent = CALL_EXPR_ARG (arg, 0);
8295 break;
8296 CASE_FLT_FN (BUILT_IN_SQRT):
8297 	      /* Prepare to do logN(sqrt(x)) -> 0.5*logN(x).  */
8298 x = CALL_EXPR_ARG (arg, 0);
8299 exponent = build_real (type, dconsthalf);
8300 break;
8301 CASE_FLT_FN (BUILT_IN_CBRT):
8302 	      /* Prepare to do logN(cbrt(x)) -> (1/3)*logN(x).  */
8303 x = CALL_EXPR_ARG (arg, 0);
8304 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
8305 dconst_third ()));
8306 break;
8307 CASE_FLT_FN (BUILT_IN_POW):
8308 	      /* Prepare to do logN(pow(x,exponent)) -> exponent*logN(x).  */
8309 x = CALL_EXPR_ARG (arg, 0);
8310 exponent = CALL_EXPR_ARG (arg, 1);
8311 break;
8312 default:
8313 break;
8314 }
8315
8316 /* Now perform the optimization. */
8317 if (x && exponent)
8318 {
8319 tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
8320 return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
8321 }
8322 }
8323 }
8324
8325 return NULL_TREE;
8326 }
8327
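/* Examples of the logarithm identities above (all guarded by
   -funsafe-math-optimizations):

     log (exp (x))     -> x
     log2 (exp2 (x))   -> x
     log (pow (x, y))  -> y * log (x)
     log (sqrt (x))    -> 0.5 * log (x)  */
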
8328 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8329 NULL_TREE if no simplification can be made. */
8330
8331 static tree
8332 fold_builtin_hypot (location_t loc, tree fndecl,
8333 tree arg0, tree arg1, tree type)
8334 {
8335 tree res, narg0, narg1;
8336
8337 if (!validate_arg (arg0, REAL_TYPE)
8338 || !validate_arg (arg1, REAL_TYPE))
8339 return NULL_TREE;
8340
8341 /* Calculate the result when the argument is a constant. */
8342 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8343 return res;
8344
8345 /* If either argument to hypot has a negate or abs, strip that off.
8346 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8347 narg0 = fold_strip_sign_ops (arg0);
8348 narg1 = fold_strip_sign_ops (arg1);
8349 if (narg0 || narg1)
8350 {
8351 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
8352 narg1 ? narg1 : arg1);
8353 }
8354
8355 /* If either argument is zero, hypot is fabs of the other. */
8356 if (real_zerop (arg0))
8357 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
8358 else if (real_zerop (arg1))
8359 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
8360
8361 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8362 if (flag_unsafe_math_optimizations
8363 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8364 {
8365 const REAL_VALUE_TYPE sqrt2_trunc
8366 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8367 return fold_build2_loc (loc, MULT_EXPR, type,
8368 fold_build1_loc (loc, ABS_EXPR, type, arg0),
8369 build_real (type, sqrt2_trunc));
8370 }
8371
8372 return NULL_TREE;
8373 }
8374
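/* Examples for hypot:

     hypot (-x, fabs (y))  -> hypot (x, y)         (sign ops stripped)
     hypot (x, 0.0)        -> fabs (x)
     hypot (x, x)          -> fabs (x) * sqrt (2)  (unsafe math)  */
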
8375
8376 /* Fold a builtin function call to pow, powf, or powl. Return
8377 NULL_TREE if no simplification can be made. */
8378 static tree
8379 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
8380 {
8381 tree res;
8382
8383 if (!validate_arg (arg0, REAL_TYPE)
8384 || !validate_arg (arg1, REAL_TYPE))
8385 return NULL_TREE;
8386
8387 /* Calculate the result when the argument is a constant. */
8388 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8389 return res;
8390
8391 /* Optimize pow(1.0,y) = 1.0. */
8392 if (real_onep (arg0))
8393 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8394
8395 if (TREE_CODE (arg1) == REAL_CST
8396 && !TREE_OVERFLOW (arg1))
8397 {
8398 REAL_VALUE_TYPE cint;
8399 REAL_VALUE_TYPE c;
8400 HOST_WIDE_INT n;
8401
8402 c = TREE_REAL_CST (arg1);
8403
8404 /* Optimize pow(x,0.0) = 1.0. */
8405 if (REAL_VALUES_EQUAL (c, dconst0))
8406 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8407 arg0);
8408
8409 /* Optimize pow(x,1.0) = x. */
8410 if (REAL_VALUES_EQUAL (c, dconst1))
8411 return arg0;
8412
8413 /* Optimize pow(x,-1.0) = 1.0/x. */
8414 if (REAL_VALUES_EQUAL (c, dconstm1))
8415 return fold_build2_loc (loc, RDIV_EXPR, type,
8416 build_real (type, dconst1), arg0);
8417
8418 /* Optimize pow(x,0.5) = sqrt(x). */
8419 if (flag_unsafe_math_optimizations
8420 && REAL_VALUES_EQUAL (c, dconsthalf))
8421 {
8422 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8423
8424 if (sqrtfn != NULL_TREE)
8425 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
8426 }
8427
8428 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8429 if (flag_unsafe_math_optimizations)
8430 {
8431 const REAL_VALUE_TYPE dconstroot
8432 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8433
8434 if (REAL_VALUES_EQUAL (c, dconstroot))
8435 {
8436 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8437 if (cbrtfn != NULL_TREE)
8438 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
8439 }
8440 }
8441
8442 /* Check for an integer exponent. */
8443 n = real_to_integer (&c);
8444 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
8445 if (real_identical (&c, &cint))
8446 {
8447 /* Attempt to evaluate pow at compile-time, unless this should
8448 raise an exception. */
8449 if (TREE_CODE (arg0) == REAL_CST
8450 && !TREE_OVERFLOW (arg0)
8451 && (n > 0
8452 || (!flag_trapping_math && !flag_errno_math)
8453 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
8454 {
8455 REAL_VALUE_TYPE x;
8456 bool inexact;
8457
8458 x = TREE_REAL_CST (arg0);
8459 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8460 if (flag_unsafe_math_optimizations || !inexact)
8461 return build_real (type, x);
8462 }
8463
8464 /* Strip sign ops from even integer powers. */
8465 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8466 {
8467 tree narg0 = fold_strip_sign_ops (arg0);
8468 if (narg0)
8469 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
8470 }
8471 }
8472 }
8473
8474 if (flag_unsafe_math_optimizations)
8475 {
8476 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8477
8478 /* Optimize pow(expN(x),y) = expN(x*y). */
8479 if (BUILTIN_EXPONENT_P (fcode))
8480 {
8481 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8482 tree arg = CALL_EXPR_ARG (arg0, 0);
8483 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
8484 return build_call_expr_loc (loc, expfn, 1, arg);
8485 }
8486
8487 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8488 if (BUILTIN_SQRT_P (fcode))
8489 {
8490 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8491 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8492 build_real (type, dconsthalf));
8493 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
8494 }
8495
8496 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8497 if (BUILTIN_CBRT_P (fcode))
8498 {
8499 tree arg = CALL_EXPR_ARG (arg0, 0);
8500 if (tree_expr_nonnegative_p (arg))
8501 {
8502 const REAL_VALUE_TYPE dconstroot
8503 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8504 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8505 build_real (type, dconstroot));
8506 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
8507 }
8508 }
8509
8510 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8511 if (fcode == BUILT_IN_POW
8512 || fcode == BUILT_IN_POWF
8513 || fcode == BUILT_IN_POWL)
8514 {
8515 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8516 if (tree_expr_nonnegative_p (arg00))
8517 {
8518 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8519 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
8520 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
8521 }
8522 }
8523 }
8524
8525 return NULL_TREE;
8526 }
8527
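/* Worked examples for the pow folds above:

     pow (x, 0.0)         -> 1.0
     pow (x, 1.0)         -> x
     pow (x, -1.0)        -> 1.0 / x
     pow (x, 0.5)         -> sqrt (x)          (unsafe math)
     pow (2.0, 3.0)       -> 8.0               (folded at compile time)
     pow (pow (x, y), z)  -> pow (x, y * z)    (iff x is nonnegative)  */
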
8528 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8529 Return NULL_TREE if no simplification can be made. */
8530 static tree
8531 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8532 tree arg0, tree arg1, tree type)
8533 {
8534 if (!validate_arg (arg0, REAL_TYPE)
8535 || !validate_arg (arg1, INTEGER_TYPE))
8536 return NULL_TREE;
8537
8538   /* Optimize powi(1.0,y) = 1.0.  */
8539 if (real_onep (arg0))
8540 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8541
8542 if (tree_fits_shwi_p (arg1))
8543 {
8544 HOST_WIDE_INT c = tree_to_shwi (arg1);
8545
8546 /* Evaluate powi at compile-time. */
8547 if (TREE_CODE (arg0) == REAL_CST
8548 && !TREE_OVERFLOW (arg0))
8549 {
8550 REAL_VALUE_TYPE x;
8551 x = TREE_REAL_CST (arg0);
8552 real_powi (&x, TYPE_MODE (type), &x, c);
8553 return build_real (type, x);
8554 }
8555
8556       /* Optimize powi(x,0) = 1.0.  */
8557 if (c == 0)
8558 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8559 arg0);
8560
8561       /* Optimize powi(x,1) = x.  */
8562 if (c == 1)
8563 return arg0;
8564
8565       /* Optimize powi(x,-1) = 1.0/x.  */
8566 if (c == -1)
8567 return fold_build2_loc (loc, RDIV_EXPR, type,
8568 build_real (type, dconst1), arg0);
8569 }
8570
8571 return NULL_TREE;
8572 }
8573
8574 /* A subroutine of fold_builtin to fold the various exponent
8575 functions. Return NULL_TREE if no simplification can be made.
8576 FUNC is the corresponding MPFR exponent function. */
8577
8578 static tree
8579 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8580 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8581 {
8582 if (validate_arg (arg, REAL_TYPE))
8583 {
8584 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8585 tree res;
8586
8587 /* Calculate the result when the argument is a constant. */
8588 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8589 return res;
8590
8591 /* Optimize expN(logN(x)) = x. */
8592 if (flag_unsafe_math_optimizations)
8593 {
8594 const enum built_in_function fcode = builtin_mathfn_code (arg);
8595
8596 if ((func == mpfr_exp
8597 && (fcode == BUILT_IN_LOG
8598 || fcode == BUILT_IN_LOGF
8599 || fcode == BUILT_IN_LOGL))
8600 || (func == mpfr_exp2
8601 && (fcode == BUILT_IN_LOG2
8602 || fcode == BUILT_IN_LOG2F
8603 || fcode == BUILT_IN_LOG2L))
8604 || (func == mpfr_exp10
8605 && (fcode == BUILT_IN_LOG10
8606 || fcode == BUILT_IN_LOG10F
8607 || fcode == BUILT_IN_LOG10L)))
8608 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8609 }
8610 }
8611
8612 return NULL_TREE;
8613 }
8614
8615 /* Return true if VAR is a VAR_DECL or a component thereof. */
8616
8617 static bool
8618 var_decl_component_p (tree var)
8619 {
8620 tree inner = var;
8621 while (handled_component_p (inner))
8622 inner = TREE_OPERAND (inner, 0);
8623 return SSA_VAR_P (inner);
8624 }
8625
8626 /* Fold function call to builtin memset. Return
8627 NULL_TREE if no simplification can be made. */
8628
8629 static tree
8630 fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
8631 tree type, bool ignore)
8632 {
8633 tree var, ret, etype;
8634 unsigned HOST_WIDE_INT length, cval;
8635
8636 if (! validate_arg (dest, POINTER_TYPE)
8637 || ! validate_arg (c, INTEGER_TYPE)
8638 || ! validate_arg (len, INTEGER_TYPE))
8639 return NULL_TREE;
8640
8641 if (! tree_fits_uhwi_p (len))
8642 return NULL_TREE;
8643
8644 /* If the LEN parameter is zero, return DEST. */
8645 if (integer_zerop (len))
8646 return omit_one_operand_loc (loc, type, dest, c);
8647
8648 if (TREE_CODE (c) != INTEGER_CST || TREE_SIDE_EFFECTS (dest))
8649 return NULL_TREE;
8650
8651 var = dest;
8652 STRIP_NOPS (var);
8653 if (TREE_CODE (var) != ADDR_EXPR)
8654 return NULL_TREE;
8655
8656 var = TREE_OPERAND (var, 0);
8657 if (TREE_THIS_VOLATILE (var))
8658 return NULL_TREE;
8659
8660 etype = TREE_TYPE (var);
8661 if (TREE_CODE (etype) == ARRAY_TYPE)
8662 etype = TREE_TYPE (etype);
8663
8664 if (!INTEGRAL_TYPE_P (etype)
8665 && !POINTER_TYPE_P (etype))
8666 return NULL_TREE;
8667
8668 if (! var_decl_component_p (var))
8669 return NULL_TREE;
8670
8671 length = tree_to_uhwi (len);
8672 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
8673 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
8674 return NULL_TREE;
8675
8676 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8677 return NULL_TREE;
8678
8679 if (integer_zerop (c))
8680 cval = 0;
8681 else
8682 {
8683 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8684 return NULL_TREE;
8685
8686 cval = TREE_INT_CST_LOW (c);
8687 cval &= 0xff;
8688 cval |= cval << 8;
8689 cval |= cval << 16;
8690 cval |= (cval << 31) << 1;
8691 }
8692
8693 ret = build_int_cst_type (etype, cval);
8694 var = build_fold_indirect_ref_loc (loc,
8695 fold_convert_loc (loc,
8696 build_pointer_type (etype),
8697 dest));
8698 ret = build2 (MODIFY_EXPR, etype, var, ret);
8699 if (ignore)
8700 return ret;
8701
8702 return omit_one_operand_loc (loc, type, dest, ret);
8703 }
8704
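/* A hedged example of the memset fold above (illustrative only;
   I is a hypothetical int of 4 units on a target with 8-bit units):

     memset (&i, 0xab, sizeof i);

   stores the replicated byte in one assignment, roughly

     i = 0xabababab;

   with DEST as the value of the expression unless it is ignored.  */
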
8705 /* Fold function call to builtin bzero. Return
8706 NULL_TREE if no simplification can be made. */
8707
8708 static tree
8709 fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
8710 {
8711 if (! validate_arg (dest, POINTER_TYPE)
8712 || ! validate_arg (size, INTEGER_TYPE))
8713 return NULL_TREE;
8714
8715 if (!ignore)
8716 return NULL_TREE;
8717
8718 /* New argument list transforming bzero(ptr x, int y) to
8719 memset(ptr x, int 0, size_t y). This is done this way
8720 so that if it isn't expanded inline, we fall back to
8721 calling bzero instead of memset. */
8722
8723 return fold_builtin_memset (loc, dest, integer_zero_node,
8724 fold_convert_loc (loc, size_type_node, size),
8725 void_type_node, ignore);
8726 }
8727
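/* Illustrative effect of the bzero fold (P and N hypothetical):

     bzero (p, n);   =>   memset (p, 0, (size_t) n);

   which fold_builtin_memset above may then simplify further.  */
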
8728 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8729 NULL_TREE if no simplification can be made.
8730 If ENDP is 0, return DEST (like memcpy).
8731 If ENDP is 1, return DEST+LEN (like mempcpy).
8732 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8733 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8734 (memmove). */
8735
8736 static tree
8737 fold_builtin_memory_op (location_t loc, tree dest, tree src,
8738 tree len, tree type, bool ignore, int endp)
8739 {
8740 tree destvar, srcvar, expr;
8741
8742 if (! validate_arg (dest, POINTER_TYPE)
8743 || ! validate_arg (src, POINTER_TYPE)
8744 || ! validate_arg (len, INTEGER_TYPE))
8745 return NULL_TREE;
8746
8747 /* If the LEN parameter is zero, return DEST. */
8748 if (integer_zerop (len))
8749 return omit_one_operand_loc (loc, type, dest, src);
8750
8751 /* If SRC and DEST are the same (and not volatile), return
8752 DEST{,+LEN,+LEN-1}. */
8753 if (operand_equal_p (src, dest, 0))
8754 expr = len;
8755 else
8756 {
8757 tree srctype, desttype;
8758 unsigned int src_align, dest_align;
8759 tree off0;
8760
8761 if (endp == 3)
8762 {
8763 src_align = get_pointer_alignment (src);
8764 dest_align = get_pointer_alignment (dest);
8765
8766 /* Both DEST and SRC must be pointer types.
8767 ??? This is what old code did. Is the testing for pointer types
8768 really mandatory?
8769
8770 If either SRC is readonly or length is 1, we can use memcpy. */
8771 if (!dest_align || !src_align)
8772 return NULL_TREE;
8773 if (readonly_data_expr (src)
8774 || (tree_fits_uhwi_p (len)
8775 && (MIN (src_align, dest_align) / BITS_PER_UNIT
8776 >= tree_to_uhwi (len))))
8777 {
8778 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8779 if (!fn)
8780 return NULL_TREE;
8781 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8782 }
8783
8784 /* If *src and *dest can't overlap, optimize into memcpy as well. */
8785 if (TREE_CODE (src) == ADDR_EXPR
8786 && TREE_CODE (dest) == ADDR_EXPR)
8787 {
8788 tree src_base, dest_base, fn;
8789 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
8790 HOST_WIDE_INT size = -1;
8791 HOST_WIDE_INT maxsize = -1;
8792
8793 srcvar = TREE_OPERAND (src, 0);
8794 src_base = get_ref_base_and_extent (srcvar, &src_offset,
8795 &size, &maxsize);
8796 destvar = TREE_OPERAND (dest, 0);
8797 dest_base = get_ref_base_and_extent (destvar, &dest_offset,
8798 &size, &maxsize);
8799 if (tree_fits_uhwi_p (len))
8800 maxsize = tree_to_uhwi (len);
8801 else
8802 maxsize = -1;
8803 src_offset /= BITS_PER_UNIT;
8804 dest_offset /= BITS_PER_UNIT;
8805 if (SSA_VAR_P (src_base)
8806 && SSA_VAR_P (dest_base))
8807 {
8808 if (operand_equal_p (src_base, dest_base, 0)
8809 && ranges_overlap_p (src_offset, maxsize,
8810 dest_offset, maxsize))
8811 return NULL_TREE;
8812 }
8813 else if (TREE_CODE (src_base) == MEM_REF
8814 && TREE_CODE (dest_base) == MEM_REF)
8815 {
8816 double_int off;
8817 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
8818 TREE_OPERAND (dest_base, 0), 0))
8819 return NULL_TREE;
8820 off = mem_ref_offset (src_base) +
8821 double_int::from_shwi (src_offset);
8822 if (!off.fits_shwi ())
8823 return NULL_TREE;
8824 src_offset = off.low;
8825 off = mem_ref_offset (dest_base) +
8826 double_int::from_shwi (dest_offset);
8827 if (!off.fits_shwi ())
8828 return NULL_TREE;
8829 dest_offset = off.low;
8830 if (ranges_overlap_p (src_offset, maxsize,
8831 dest_offset, maxsize))
8832 return NULL_TREE;
8833 }
8834 else
8835 return NULL_TREE;
8836
8837 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8838 if (!fn)
8839 return NULL_TREE;
8840 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8841 }
8842
8843 /* If the destination and source cannot alias, optimize into
8844 memcpy as well. */
8845 if ((is_gimple_min_invariant (dest)
8846 || TREE_CODE (dest) == SSA_NAME)
8847 && (is_gimple_min_invariant (src)
8848 || TREE_CODE (src) == SSA_NAME))
8849 {
8850 ao_ref destr, srcr;
8851 ao_ref_init_from_ptr_and_size (&destr, dest, len);
8852 ao_ref_init_from_ptr_and_size (&srcr, src, len);
8853 if (!refs_may_alias_p_1 (&destr, &srcr, false))
8854 {
8855 tree fn;
8856 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8857 if (!fn)
8858 return NULL_TREE;
8859 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8860 }
8861 }
8862
8863 return NULL_TREE;
8864 }
8865
8866 if (!tree_fits_shwi_p (len))
8867 return NULL_TREE;
8868 /* FIXME:
8869 This logic loses for arguments like (type *)malloc (sizeof (type)),
8870 since we strip the casts down to the VOID return value from malloc.
8871 Perhaps we ought to inherit the type from a non-VOID argument here? */
8872 STRIP_NOPS (src);
8873 STRIP_NOPS (dest);
8874 if (!POINTER_TYPE_P (TREE_TYPE (src))
8875 || !POINTER_TYPE_P (TREE_TYPE (dest)))
8876 return NULL_TREE;
8877 /* In the following, try to find the type that is most natural to
8878 use for the memcpy source and destination and that allows the
8879 most optimization when memcpy is turned into a plain assignment
8880 using that type. In theory we could always use a char[len] type,
8881 but that would only gain us that the destination and source may
8882 no longer have their address taken. */
8883 /* As we fold (void *)(p + CST) to (void *)p + CST, undo this here. */
8884 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
8885 {
8886 tree tem = TREE_OPERAND (src, 0);
8887 STRIP_NOPS (tem);
8888 if (tem != TREE_OPERAND (src, 0))
8889 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
8890 }
8891 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
8892 {
8893 tree tem = TREE_OPERAND (dest, 0);
8894 STRIP_NOPS (tem);
8895 if (tem != TREE_OPERAND (dest, 0))
8896 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
8897 }
8898 srctype = TREE_TYPE (TREE_TYPE (src));
8899 if (TREE_CODE (srctype) == ARRAY_TYPE
8900 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8901 {
8902 srctype = TREE_TYPE (srctype);
8903 STRIP_NOPS (src);
8904 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
8905 }
8906 desttype = TREE_TYPE (TREE_TYPE (dest));
8907 if (TREE_CODE (desttype) == ARRAY_TYPE
8908 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8909 {
8910 desttype = TREE_TYPE (desttype);
8911 STRIP_NOPS (dest);
8912 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
8913 }
8914 if (TREE_ADDRESSABLE (srctype)
8915 || TREE_ADDRESSABLE (desttype))
8916 return NULL_TREE;
8917
8918 /* Make sure we are not copying using a floating-point mode or
8919 a type whose size possibly does not match its precision. */
8920 if (FLOAT_MODE_P (TYPE_MODE (desttype))
8921 || TREE_CODE (desttype) == BOOLEAN_TYPE
8922 || TREE_CODE (desttype) == ENUMERAL_TYPE)
8923 {
8924 /* A more suitable int_mode_for_mode would return a vector
8925 integer mode for a vector float mode or an integer complex
8926 mode for a float complex mode if there isn't a regular
8927 integer mode covering the mode of desttype. */
8928 enum machine_mode mode = int_mode_for_mode (TYPE_MODE (desttype));
8929 if (mode == BLKmode)
8930 desttype = NULL_TREE;
8931 else
8932 desttype = build_nonstandard_integer_type (GET_MODE_BITSIZE (mode),
8933 1);
8934 }
8935 if (FLOAT_MODE_P (TYPE_MODE (srctype))
8936 || TREE_CODE (srctype) == BOOLEAN_TYPE
8937 || TREE_CODE (srctype) == ENUMERAL_TYPE)
8938 {
8939 enum machine_mode mode = int_mode_for_mode (TYPE_MODE (srctype));
8940 if (mode == BLKmode)
8941 srctype = NULL_TREE;
8942 else
8943 srctype = build_nonstandard_integer_type (GET_MODE_BITSIZE (mode),
8944 1);
8945 }
8946 if (!srctype)
8947 srctype = desttype;
8948 if (!desttype)
8949 desttype = srctype;
8950 if (!srctype)
8951 return NULL_TREE;
8952
8953 src_align = get_pointer_alignment (src);
8954 dest_align = get_pointer_alignment (dest);
8955 if (dest_align < TYPE_ALIGN (desttype)
8956 || src_align < TYPE_ALIGN (srctype))
8957 return NULL_TREE;
8958
8959 if (!ignore)
8960 dest = builtin_save_expr (dest);
8961
8962 /* Build accesses at offset zero with a ref-all character type. */
8963 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
8964 ptr_mode, true), 0);
8965
8966 destvar = dest;
8967 STRIP_NOPS (destvar);
8968 if (TREE_CODE (destvar) == ADDR_EXPR
8969 && var_decl_component_p (TREE_OPERAND (destvar, 0))
8970 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8971 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
8972 else
8973 destvar = NULL_TREE;
8974
8975 srcvar = src;
8976 STRIP_NOPS (srcvar);
8977 if (TREE_CODE (srcvar) == ADDR_EXPR
8978 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
8979 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8980 {
8981 if (!destvar
8982 || src_align >= TYPE_ALIGN (desttype))
8983 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
8984 srcvar, off0);
8985 else if (!STRICT_ALIGNMENT)
8986 {
8987 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
8988 src_align);
8989 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
8990 }
8991 else
8992 srcvar = NULL_TREE;
8993 }
8994 else
8995 srcvar = NULL_TREE;
8996
8997 if (srcvar == NULL_TREE && destvar == NULL_TREE)
8998 return NULL_TREE;
8999
9000 if (srcvar == NULL_TREE)
9001 {
9002 STRIP_NOPS (src);
9003 if (src_align >= TYPE_ALIGN (desttype))
9004 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
9005 else
9006 {
9007 if (STRICT_ALIGNMENT)
9008 return NULL_TREE;
9009 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
9010 src_align);
9011 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
9012 }
9013 }
9014 else if (destvar == NULL_TREE)
9015 {
9016 STRIP_NOPS (dest);
9017 if (dest_align >= TYPE_ALIGN (srctype))
9018 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
9019 else
9020 {
9021 if (STRICT_ALIGNMENT)
9022 return NULL_TREE;
9023 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
9024 dest_align);
9025 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
9026 }
9027 }
9028
9029 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, srcvar);
9030 }
9031
9032 if (ignore)
9033 return expr;
9034
9035 if (endp == 0 || endp == 3)
9036 return omit_one_operand_loc (loc, type, dest, expr);
9037
9038 if (expr == len)
9039 expr = NULL_TREE;
9040
9041 if (endp == 2)
9042 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
9043 ssize_int (1));
9044
9045 dest = fold_build_pointer_plus_loc (loc, dest, len);
9046 dest = fold_convert_loc (loc, type, dest);
9047 if (expr)
9048 dest = omit_one_operand_loc (loc, type, dest, expr);
9049 return dest;
9050 }
9051
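/* A sketch of what fold_builtin_memory_op can produce (illustrative
   only; A, B and BUF are hypothetical variables):

     memcpy (&a, &b, sizeof a);   =>   a = b;   (a MEM_REF assignment)

   and, for memmove, when SRC is read-only data or the ranges provably
   cannot overlap:

     memmove (buf, "abc", 3);     =>   memcpy (buf, "abc", 3);

   For mempcpy (ENDP == 1) the value of the folded expression is
   DEST + LEN rather than DEST.  */
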
9052 /* Fold function call to builtin strcpy with arguments DEST and SRC.
9053 If LEN is not NULL, it represents the length of the string to be
9054 copied. Return NULL_TREE if no simplification can be made. */
9055
9056 tree
9057 fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
9058 {
9059 tree fn;
9060
9061 if (!validate_arg (dest, POINTER_TYPE)
9062 || !validate_arg (src, POINTER_TYPE))
9063 return NULL_TREE;
9064
9065 /* If SRC and DEST are the same (and not volatile), return DEST. */
9066 if (operand_equal_p (src, dest, 0))
9067 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
9068
9069 if (optimize_function_for_size_p (cfun))
9070 return NULL_TREE;
9071
9072 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
9073 if (!fn)
9074 return NULL_TREE;
9075
9076 if (!len)
9077 {
9078 len = c_strlen (src, 1);
9079 if (! len || TREE_SIDE_EFFECTS (len))
9080 return NULL_TREE;
9081 }
9082
9083 len = fold_convert_loc (loc, size_type_node, len);
9084 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
9085 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
9086 build_call_expr_loc (loc, fn, 3, dest, src, len));
9087 }
9088
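/* Illustrative example of the strcpy fold (D hypothetical), applied
   when not optimizing for size:

     strcpy (d, "hi");   =>   memcpy (d, "hi", 3);

   where 3 is c_strlen (src) + 1 so the terminating NUL is copied.  */
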
9089 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
9090 Return NULL_TREE if no simplification can be made. */
9091
9092 static tree
9093 fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
9094 {
9095 tree fn, len, lenp1, call, type;
9096
9097 if (!validate_arg (dest, POINTER_TYPE)
9098 || !validate_arg (src, POINTER_TYPE))
9099 return NULL_TREE;
9100
9101 len = c_strlen (src, 1);
9102 if (!len
9103 || TREE_CODE (len) != INTEGER_CST)
9104 return NULL_TREE;
9105
9106 if (optimize_function_for_size_p (cfun)
9107 /* If length is zero it's small enough. */
9108 && !integer_zerop (len))
9109 return NULL_TREE;
9110
9111 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
9112 if (!fn)
9113 return NULL_TREE;
9114
9115 lenp1 = size_binop_loc (loc, PLUS_EXPR,
9116 fold_convert_loc (loc, size_type_node, len),
9117 build_int_cst (size_type_node, 1));
9118 /* We use dest twice in building our expression. Save it from
9119 multiple expansions. */
9120 dest = builtin_save_expr (dest);
9121 call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);
9122
9123 type = TREE_TYPE (TREE_TYPE (fndecl));
9124 dest = fold_build_pointer_plus_loc (loc, dest, len);
9125 dest = fold_convert_loc (loc, type, dest);
9126 dest = omit_one_operand_loc (loc, type, dest, call);
9127 return dest;
9128 }
9129
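/* Illustrative example of the stpcpy fold (D hypothetical):

     stpcpy (d, "hi");   =>   (memcpy (d, "hi", 3), d + 2);

   a memcpy of strlen + 1 bytes whose value is DEST + strlen, the
   address of the copied terminating NUL.  */
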
9130 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
9131 If SLEN is not NULL, it represents the length of the source string.
9132 Return NULL_TREE if no simplification can be made. */
9133
9134 tree
9135 fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
9136 tree src, tree len, tree slen)
9137 {
9138 tree fn;
9139
9140 if (!validate_arg (dest, POINTER_TYPE)
9141 || !validate_arg (src, POINTER_TYPE)
9142 || !validate_arg (len, INTEGER_TYPE))
9143 return NULL_TREE;
9144
9145 /* If the LEN parameter is zero, return DEST. */
9146 if (integer_zerop (len))
9147 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
9148
9149 /* We can't compare slen with len as constants below if len is not a
9150 constant. */
9151 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
9152 return NULL_TREE;
9153
9154 if (!slen)
9155 slen = c_strlen (src, 1);
9156
9157 /* Now, we must be passed a constant src ptr parameter. */
9158 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
9159 return NULL_TREE;
9160
9161 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
9162
9163 /* We do not support simplification of this case, though we do
9164 support it when expanding trees into RTL. */
9165 /* FIXME: generate a call to __builtin_memset. */
9166 if (tree_int_cst_lt (slen, len))
9167 return NULL_TREE;
9168
9169 /* OK, transform into builtin memcpy. */
9170 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
9171 if (!fn)
9172 return NULL_TREE;
9173
9174 len = fold_convert_loc (loc, size_type_node, len);
9175 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
9176 build_call_expr_loc (loc, fn, 3, dest, src, len));
9177 }
9178
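/* Illustrative example of the strncpy fold (D hypothetical).  When
   LEN is a constant no larger than strlen (SRC) + 1, no NUL padding
   is required and the call degenerates to:

     strncpy (d, "hi", 3);   =>   memcpy (d, "hi", 3);  */
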
9179 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
9180 arguments to the call, and TYPE is its return type.
9181 Return NULL_TREE if no simplification can be made. */
9182
9183 static tree
9184 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
9185 {
9186 if (!validate_arg (arg1, POINTER_TYPE)
9187 || !validate_arg (arg2, INTEGER_TYPE)
9188 || !validate_arg (len, INTEGER_TYPE))
9189 return NULL_TREE;
9190 else
9191 {
9192 const char *p1;
9193
9194 if (TREE_CODE (arg2) != INTEGER_CST
9195 || !tree_fits_uhwi_p (len))
9196 return NULL_TREE;
9197
9198 p1 = c_getstr (arg1);
9199 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
9200 {
9201 char c;
9202 const char *r;
9203 tree tem;
9204
9205 if (target_char_cast (arg2, &c))
9206 return NULL_TREE;
9207
9208 r = (const char *) memchr (p1, c, tree_to_uhwi (len));
9209
9210 if (r == NULL)
9211 return build_int_cst (TREE_TYPE (arg1), 0);
9212
9213 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
9214 return fold_convert_loc (loc, type, tem);
9215 }
9216 return NULL_TREE;
9217 }
9218 }
9219
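/* Illustrative example of the memchr fold:

     memchr ("hello", 'l', 5)   =>   "hello" + 2

   evaluated at compile time with the host memchr after casting the
   character to the target character set; a miss folds to a null
   pointer of ARG1's type.  */
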
9220 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
9221 Return NULL_TREE if no simplification can be made. */
9222
9223 static tree
9224 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
9225 {
9226 const char *p1, *p2;
9227
9228 if (!validate_arg (arg1, POINTER_TYPE)
9229 || !validate_arg (arg2, POINTER_TYPE)
9230 || !validate_arg (len, INTEGER_TYPE))
9231 return NULL_TREE;
9232
9233 /* If the LEN parameter is zero, return zero. */
9234 if (integer_zerop (len))
9235 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
9236 arg1, arg2);
9237
9238 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9239 if (operand_equal_p (arg1, arg2, 0))
9240 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
9241
9242 p1 = c_getstr (arg1);
9243 p2 = c_getstr (arg2);
9244
9245 /* If all arguments are constant, and the value of len is not greater
9246 than the lengths of arg1 and arg2, evaluate at compile-time. */
9247 if (tree_fits_uhwi_p (len) && p1 && p2
9248 && compare_tree_int (len, strlen (p1) + 1) <= 0
9249 && compare_tree_int (len, strlen (p2) + 1) <= 0)
9250 {
9251 const int r = memcmp (p1, p2, tree_to_uhwi (len));
9252
9253 if (r > 0)
9254 return integer_one_node;
9255 else if (r < 0)
9256 return integer_minus_one_node;
9257 else
9258 return integer_zero_node;
9259 }
9260
9261 /* If the len parameter is one, return an expression corresponding to
9262 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
9263 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
9264 {
9265 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9266 tree cst_uchar_ptr_node
9267 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9268
9269 tree ind1
9270 = fold_convert_loc (loc, integer_type_node,
9271 build1 (INDIRECT_REF, cst_uchar_node,
9272 fold_convert_loc (loc,
9273 cst_uchar_ptr_node,
9274 arg1)));
9275 tree ind2
9276 = fold_convert_loc (loc, integer_type_node,
9277 build1 (INDIRECT_REF, cst_uchar_node,
9278 fold_convert_loc (loc,
9279 cst_uchar_ptr_node,
9280 arg2)));
9281 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9282 }
9283
9284 return NULL_TREE;
9285 }
9286
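/* Illustrative examples of the memcmp folds (P and Q hypothetical):

     memcmp ("ab", "ac", 2)   =>   -1   (evaluated at compile time)
     memcmp (p, q, 1)         =>   *(const unsigned char *) p
                                   - *(const unsigned char *) q  */
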
9287 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
9288 Return NULL_TREE if no simplification can be made. */
9289
9290 static tree
9291 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
9292 {
9293 const char *p1, *p2;
9294
9295 if (!validate_arg (arg1, POINTER_TYPE)
9296 || !validate_arg (arg2, POINTER_TYPE))
9297 return NULL_TREE;
9298
9299 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9300 if (operand_equal_p (arg1, arg2, 0))
9301 return integer_zero_node;
9302
9303 p1 = c_getstr (arg1);
9304 p2 = c_getstr (arg2);
9305
9306 if (p1 && p2)
9307 {
9308 const int i = strcmp (p1, p2);
9309 if (i < 0)
9310 return integer_minus_one_node;
9311 else if (i > 0)
9312 return integer_one_node;
9313 else
9314 return integer_zero_node;
9315 }
9316
9317 /* If the second arg is "", return *(const unsigned char*)arg1. */
9318 if (p2 && *p2 == '\0')
9319 {
9320 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9321 tree cst_uchar_ptr_node
9322 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9323
9324 return fold_convert_loc (loc, integer_type_node,
9325 build1 (INDIRECT_REF, cst_uchar_node,
9326 fold_convert_loc (loc,
9327 cst_uchar_ptr_node,
9328 arg1)));
9329 }
9330
9331 /* If the first arg is "", return -*(const unsigned char*)arg2. */
9332 if (p1 && *p1 == '\0')
9333 {
9334 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9335 tree cst_uchar_ptr_node
9336 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9337
9338 tree temp
9339 = fold_convert_loc (loc, integer_type_node,
9340 build1 (INDIRECT_REF, cst_uchar_node,
9341 fold_convert_loc (loc,
9342 cst_uchar_ptr_node,
9343 arg2)));
9344 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
9345 }
9346
9347 return NULL_TREE;
9348 }
9349
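/* Illustrative examples of the strcmp folds (S hypothetical):

     strcmp ("a", "b")   =>   -1   (evaluated at compile time)
     strcmp (s, "")      =>   *(const unsigned char *) s
     strcmp ("", s)      =>   -*(const unsigned char *) s  */
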
9350 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
9351 Return NULL_TREE if no simplification can be made. */
9352
9353 static tree
9354 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
9355 {
9356 const char *p1, *p2;
9357
9358 if (!validate_arg (arg1, POINTER_TYPE)
9359 || !validate_arg (arg2, POINTER_TYPE)
9360 || !validate_arg (len, INTEGER_TYPE))
9361 return NULL_TREE;
9362
9363 /* If the LEN parameter is zero, return zero. */
9364 if (integer_zerop (len))
9365 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
9366 arg1, arg2);
9367
9368 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9369 if (operand_equal_p (arg1, arg2, 0))
9370 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
9371
9372 p1 = c_getstr (arg1);
9373 p2 = c_getstr (arg2);
9374
9375 if (tree_fits_uhwi_p (len) && p1 && p2)
9376 {
9377 const int i = strncmp (p1, p2, tree_to_uhwi (len));
9378 if (i > 0)
9379 return integer_one_node;
9380 else if (i < 0)
9381 return integer_minus_one_node;
9382 else
9383 return integer_zero_node;
9384 }
9385
9386 /* If the second arg is "", and the length is greater than zero,
9387 return *(const unsigned char*)arg1. */
9388 if (p2 && *p2 == '\0'
9389 && TREE_CODE (len) == INTEGER_CST
9390 && tree_int_cst_sgn (len) == 1)
9391 {
9392 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9393 tree cst_uchar_ptr_node
9394 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9395
9396 return fold_convert_loc (loc, integer_type_node,
9397 build1 (INDIRECT_REF, cst_uchar_node,
9398 fold_convert_loc (loc,
9399 cst_uchar_ptr_node,
9400 arg1)));
9401 }
9402
9403 /* If the first arg is "", and the length is greater than zero,
9404 return -*(const unsigned char*)arg2. */
9405 if (p1 && *p1 == '\0'
9406 && TREE_CODE (len) == INTEGER_CST
9407 && tree_int_cst_sgn (len) == 1)
9408 {
9409 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9410 tree cst_uchar_ptr_node
9411 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9412
9413 tree temp = fold_convert_loc (loc, integer_type_node,
9414 build1 (INDIRECT_REF, cst_uchar_node,
9415 fold_convert_loc (loc,
9416 cst_uchar_ptr_node,
9417 arg2)));
9418 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
9419 }
9420
9421 /* If the len parameter is one, return an expression corresponding to
9422 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
9423 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
9424 {
9425 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9426 tree cst_uchar_ptr_node
9427 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9428
9429 tree ind1 = fold_convert_loc (loc, integer_type_node,
9430 build1 (INDIRECT_REF, cst_uchar_node,
9431 fold_convert_loc (loc,
9432 cst_uchar_ptr_node,
9433 arg1)));
9434 tree ind2 = fold_convert_loc (loc, integer_type_node,
9435 build1 (INDIRECT_REF, cst_uchar_node,
9436 fold_convert_loc (loc,
9437 cst_uchar_ptr_node,
9438 arg2)));
9439 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9440 }
9441
9442 return NULL_TREE;
9443 }
9444
9445 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9446 ARG. Return NULL_TREE if no simplification can be made. */
9447
9448 static tree
9449 fold_builtin_signbit (location_t loc, tree arg, tree type)
9450 {
9451 if (!validate_arg (arg, REAL_TYPE))
9452 return NULL_TREE;
9453
9454 /* If ARG is a compile-time constant, determine the result. */
9455 if (TREE_CODE (arg) == REAL_CST
9456 && !TREE_OVERFLOW (arg))
9457 {
9458 REAL_VALUE_TYPE c;
9459
9460 c = TREE_REAL_CST (arg);
9461 return (REAL_VALUE_NEGATIVE (c)
9462 ? build_one_cst (type)
9463 : build_zero_cst (type));
9464 }
9465
9466 /* If ARG is non-negative, the result is always zero. */
9467 if (tree_expr_nonnegative_p (arg))
9468 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9469
9470 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9471 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9472 return fold_convert (type,
9473 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
9474 build_real (TREE_TYPE (arg), dconst0)));
9475
9476 return NULL_TREE;
9477 }
9478
9479 /* Fold function call to builtin copysign, copysignf or copysignl with
9480 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9481 be made. */
9482
9483 static tree
9484 fold_builtin_copysign (location_t loc, tree fndecl,
9485 tree arg1, tree arg2, tree type)
9486 {
9487 tree tem;
9488
9489 if (!validate_arg (arg1, REAL_TYPE)
9490 || !validate_arg (arg2, REAL_TYPE))
9491 return NULL_TREE;
9492
9493 /* copysign(X,X) is X. */
9494 if (operand_equal_p (arg1, arg2, 0))
9495 return fold_convert_loc (loc, type, arg1);
9496
9497 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9498 if (TREE_CODE (arg1) == REAL_CST
9499 && TREE_CODE (arg2) == REAL_CST
9500 && !TREE_OVERFLOW (arg1)
9501 && !TREE_OVERFLOW (arg2))
9502 {
9503 REAL_VALUE_TYPE c1, c2;
9504
9505 c1 = TREE_REAL_CST (arg1);
9506 c2 = TREE_REAL_CST (arg2);
9507 /* c1.sign := c2.sign. */
9508 real_copysign (&c1, &c2);
9509 return build_real (type, c1);
9510 }
9511
9512 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9513 Remember to evaluate Y for side-effects. */
9514 if (tree_expr_nonnegative_p (arg2))
9515 return omit_one_operand_loc (loc, type,
9516 fold_build1_loc (loc, ABS_EXPR, type, arg1),
9517 arg2);
9518
9519 /* Strip sign changing operations for the first argument. */
9520 tem = fold_strip_sign_ops (arg1);
9521 if (tem)
9522 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
9523
9524 return NULL_TREE;
9525 }
9526
9527 /* Fold a call to builtin isascii with argument ARG. */
9528
9529 static tree
9530 fold_builtin_isascii (location_t loc, tree arg)
9531 {
9532 if (!validate_arg (arg, INTEGER_TYPE))
9533 return NULL_TREE;
9534 else
9535 {
9536 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9537 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9538 build_int_cst (integer_type_node,
9539 ~ (unsigned HOST_WIDE_INT) 0x7f));
9540 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9541 arg, integer_zero_node);
9542 }
9543 }
9544
9545 /* Fold a call to builtin toascii with argument ARG. */
9546
9547 static tree
9548 fold_builtin_toascii (location_t loc, tree arg)
9549 {
9550 if (!validate_arg (arg, INTEGER_TYPE))
9551 return NULL_TREE;
9552
9553 /* Transform toascii(c) -> (c & 0x7f). */
9554 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9555 build_int_cst (integer_type_node, 0x7f));
9556 }
9557
9558 /* Fold a call to builtin isdigit with argument ARG. */
9559
9560 static tree
9561 fold_builtin_isdigit (location_t loc, tree arg)
9562 {
9563 if (!validate_arg (arg, INTEGER_TYPE))
9564 return NULL_TREE;
9565 else
9566 {
9567 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9568 /* According to the C standard, isdigit is unaffected by locale.
9569 However, it definitely is affected by the target character set. */
9570 unsigned HOST_WIDE_INT target_digit0
9571 = lang_hooks.to_target_charset ('0');
9572
9573 if (target_digit0 == 0)
9574 return NULL_TREE;
9575
9576 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9577 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9578 build_int_cst (unsigned_type_node, target_digit0));
9579 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9580 build_int_cst (unsigned_type_node, 9));
9581 }
9582 }
9583
9584 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9585
9586 static tree
9587 fold_builtin_fabs (location_t loc, tree arg, tree type)
9588 {
9589 if (!validate_arg (arg, REAL_TYPE))
9590 return NULL_TREE;
9591
9592 arg = fold_convert_loc (loc, type, arg);
9593 if (TREE_CODE (arg) == REAL_CST)
9594 return fold_abs_const (arg, type);
9595 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9596 }
9597
9598 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9599
9600 static tree
9601 fold_builtin_abs (location_t loc, tree arg, tree type)
9602 {
9603 if (!validate_arg (arg, INTEGER_TYPE))
9604 return NULL_TREE;
9605
9606 arg = fold_convert_loc (loc, type, arg);
9607 if (TREE_CODE (arg) == INTEGER_CST)
9608 return fold_abs_const (arg, type);
9609 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9610 }
9611
9612 /* Fold a fma operation with arguments ARG[012]. */
9613
9614 tree
9615 fold_fma (location_t loc ATTRIBUTE_UNUSED,
9616 tree type, tree arg0, tree arg1, tree arg2)
9617 {
9618 if (TREE_CODE (arg0) == REAL_CST
9619 && TREE_CODE (arg1) == REAL_CST
9620 && TREE_CODE (arg2) == REAL_CST)
9621 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
9622
9623 return NULL_TREE;
9624 }
9625
9626 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
9627
9628 static tree
9629 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
9630 {
9631 if (validate_arg (arg0, REAL_TYPE)
9632 && validate_arg (arg1, REAL_TYPE)
9633 && validate_arg (arg2, REAL_TYPE))
9634 {
9635 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
9636 if (tem)
9637 return tem;
9638
9639 /* ??? Only expand to FMA_EXPR if it's directly supported. */
9640 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
9641 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
9642 }
9643 return NULL_TREE;
9644 }
9645
9646 /* Fold a call to builtin fmin or fmax. */
9647
9648 static tree
9649 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
9650 tree type, bool max)
9651 {
9652 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9653 {
9654 /* Calculate the result when the argument is a constant. */
9655 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9656
9657 if (res)
9658 return res;
9659
9660 /* If either argument is NaN, return the other one. Avoid the
9661 transformation if we get (and honor) a signalling NaN. Using
9662 omit_one_operand() ensures we create a non-lvalue. */
9663 if (TREE_CODE (arg0) == REAL_CST
9664 && real_isnan (&TREE_REAL_CST (arg0))
9665 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9666 || ! TREE_REAL_CST (arg0).signalling))
9667 return omit_one_operand_loc (loc, type, arg1, arg0);
9668 if (TREE_CODE (arg1) == REAL_CST
9669 && real_isnan (&TREE_REAL_CST (arg1))
9670 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9671 || ! TREE_REAL_CST (arg1).signalling))
9672 return omit_one_operand_loc (loc, type, arg0, arg1);
9673
9674 /* Transform fmin/fmax(x,x) -> x. */
9675 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9676 return omit_one_operand_loc (loc, type, arg0, arg1);
9677
9678 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9679 functions to return the numeric arg if the other one is NaN.
9680 These tree codes don't honor that, so only transform if
9681 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9682 handled, so we don't have to worry about it either. */
9683 if (flag_finite_math_only)
9684 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9685 fold_convert_loc (loc, type, arg0),
9686 fold_convert_loc (loc, type, arg1));
9687 }
9688 return NULL_TREE;
9689 }
9690
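/* Illustrative examples of the fmin/fmax folds (X hypothetical):

     fmax (x, __builtin_nan (""))   =>   x   (quiet NaN discarded)
     fmin (x, x)                    =>   x

   With -ffinite-math-only, fmax (x, y) becomes MAX_EXPR <x, y>;
   MAX_EXPR does not implement the C99 rule of returning the numeric
   operand when the other is NaN, so it is only used when NaNs are
   assumed away.  */
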
9691 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9692
9693 static tree
9694 fold_builtin_carg (location_t loc, tree arg, tree type)
9695 {
9696 if (validate_arg (arg, COMPLEX_TYPE)
9697 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9698 {
9699 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9700
9701 if (atan2_fn)
9702 {
9703 tree new_arg = builtin_save_expr (arg);
9704 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9705 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9706 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9707 }
9708 }
9709
9710 return NULL_TREE;
9711 }
9712
9713 /* Fold a call to builtin logb/ilogb. */
9714
9715 static tree
9716 fold_builtin_logb (location_t loc, tree arg, tree rettype)
9717 {
9718 if (! validate_arg (arg, REAL_TYPE))
9719 return NULL_TREE;
9720
9721 STRIP_NOPS (arg);
9722
9723 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9724 {
9725 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9726
9727 switch (value->cl)
9728 {
9729 case rvc_nan:
9730 case rvc_inf:
9731 /* If arg is Inf or NaN and we're logb, return it. */
9732 if (TREE_CODE (rettype) == REAL_TYPE)
9733 {
9734 /* For logb(-Inf) we have to return +Inf. */
9735 if (real_isinf (value) && real_isneg (value))
9736 {
9737 REAL_VALUE_TYPE tem;
9738 real_inf (&tem);
9739 return build_real (rettype, tem);
9740 }
9741 return fold_convert_loc (loc, rettype, arg);
9742 }
9743 /* Fall through... */
9744 case rvc_zero:
9745 /* Zero may set errno and/or raise an exception for logb; also,
9746 for ilogb we don't know FP_ILOGB0. */
9747 return NULL_TREE;
9748 case rvc_normal:
9749 /* For normal numbers, proceed iff radix == 2. In GCC,
9750 normalized significands are in the range [0.5, 1.0). We
9751 want the exponent as if they were [1.0, 2.0) so get the
9752 exponent and subtract 1. */
9753 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9754 return fold_convert_loc (loc, rettype,
9755 build_int_cst (integer_type_node,
9756 REAL_EXP (value)-1));
9757 break;
9758 }
9759 }
9760
9761 return NULL_TREE;
9762 }
9763
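/* A worked example of the logb fold: in GCC's representation
   8.0 has significand 0.5 and REAL_EXP 4 (0.5 * 2**4), so with the
   off-by-one adjustment described above:

     logb (8.0)                 =>   3.0
     logb (-__builtin_inf ())   =>   +Inf  */
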
9764 /* Fold a call to builtin significand, if radix == 2. */
9765
9766 static tree
9767 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9768 {
9769 if (! validate_arg (arg, REAL_TYPE))
9770 return NULL_TREE;
9771
9772 STRIP_NOPS (arg);
9773
9774 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9775 {
9776 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9777
9778 switch (value->cl)
9779 {
9780 case rvc_zero:
9781 case rvc_nan:
9782 case rvc_inf:
9783 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9784 return fold_convert_loc (loc, rettype, arg);
9785 case rvc_normal:
9786 /* For normal numbers, proceed iff radix == 2. */
9787 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9788 {
9789 REAL_VALUE_TYPE result = *value;
9790 /* In GCC, normalized significands are in the range [0.5,
9791 1.0). We want them to be [1.0, 2.0) so set the
9792 exponent to 1. */
9793 SET_REAL_EXP (&result, 1);
9794 return build_real (rettype, result);
9795 }
9796 break;
9797 }
9798 }
9799
9800 return NULL_TREE;
9801 }
9802
9803 /* Fold a call to builtin frexp, we can assume the base is 2. */
9804
9805 static tree
9806 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9807 {
9808 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9809 return NULL_TREE;
9810
9811 STRIP_NOPS (arg0);
9812
9813 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9814 return NULL_TREE;
9815
9816 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9817
9818 /* Proceed if a valid pointer type was passed in. */
9819 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9820 {
9821 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9822 tree frac, exp;
9823
9824 switch (value->cl)
9825 {
9826 case rvc_zero:
9827 /* For +-0, return (*exp = 0, +-0). */
9828 exp = integer_zero_node;
9829 frac = arg0;
9830 break;
9831 case rvc_nan:
9832 case rvc_inf:
9833 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9834 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9835 case rvc_normal:
9836 {
9837 /* Since the frexp function always expects base 2, and in
9838 GCC normalized significands are already in the range
9839 [0.5, 1.0), we have exactly what frexp wants. */
9840 REAL_VALUE_TYPE frac_rvt = *value;
9841 SET_REAL_EXP (&frac_rvt, 0);
9842 frac = build_real (rettype, frac_rvt);
9843 exp = build_int_cst (integer_type_node, REAL_EXP (value));
9844 }
9845 break;
9846 default:
9847 gcc_unreachable ();
9848 }
9849
9850 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
9851 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9852 TREE_SIDE_EFFECTS (arg1) = 1;
9853 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9854 }
9855
9856 return NULL_TREE;
9857 }
9858
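/* A worked example of the frexp fold, using the same representation:
   8.0 has significand 0.5 and REAL_EXP 4, which is already the pair
   frexp must produce, so with a hypothetical int E:

     frexp (8.0, &e)   =>   (e = 4, 0.5)

   expressed as a COMPOUND_EXPR of the store and the fraction.  */
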
9859 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9860 then we can assume the base is two. If it's false, then we have to
9861 check the mode of the TYPE parameter in certain cases. */
9862
9863 static tree
9864 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9865 tree type, bool ldexp)
9866 {
9867 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9868 {
9869 STRIP_NOPS (arg0);
9870 STRIP_NOPS (arg1);
9871
9872 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9873 if (real_zerop (arg0) || integer_zerop (arg1)
9874 || (TREE_CODE (arg0) == REAL_CST
9875 && !real_isfinite (&TREE_REAL_CST (arg0))))
9876 return omit_one_operand_loc (loc, type, arg0, arg1);
9877
9878 /* If both arguments are constant, then try to evaluate it. */
9879 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9880 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9881 && tree_fits_shwi_p (arg1))
9882 {
9883 /* Bound the maximum adjustment to twice the range of the
9884 mode's valid exponents. Use abs to ensure the range is
9885 positive as a sanity check. */
9886 const long max_exp_adj = 2 *
9887 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9888 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9889
9890 /* Get the user-requested adjustment. */
9891 const HOST_WIDE_INT req_exp_adj = tree_to_shwi (arg1);
9892
9893 /* The requested adjustment must be inside this range. This
9894 is a preliminary cap to avoid things like overflow, we
9895 may still fail to compute the result for other reasons. */
9896 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9897 {
9898 REAL_VALUE_TYPE initial_result;
9899
9900 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9901
9902 /* Ensure we didn't overflow. */
9903 if (! real_isinf (&initial_result))
9904 {
9905 const REAL_VALUE_TYPE trunc_result
9906 = real_value_truncate (TYPE_MODE (type), initial_result);
9907
9908 /* Only proceed if the target mode can hold the
9909 resulting value. */
9910 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9911 return build_real (type, trunc_result);
9912 }
9913 }
9914 }
9915 }
9916
9917 return NULL_TREE;
9918 }
9919
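/* Illustrative examples of the ldexp/scalbn folds (X hypothetical):

     ldexp (x, 0)     =>   x
     ldexp (1.0, 3)   =>   8.0   (constant-folded via real_ldexp)

   Constant folding is declined when the requested adjustment exceeds
   twice the exponent range of the type or when the result would not
   survive truncation to the target mode.  */
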
9920 /* Fold a call to builtin modf. */
9921
9922 static tree
9923 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9924 {
9925 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9926 return NULL_TREE;
9927
9928 STRIP_NOPS (arg0);
9929
9930 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9931 return NULL_TREE;
9932
9933 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9934
9935 /* Proceed if a valid pointer type was passed in. */
9936 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9937 {
9938 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9939 REAL_VALUE_TYPE trunc, frac;
9940
9941 switch (value->cl)
9942 {
9943 case rvc_nan:
9944 case rvc_zero:
9945 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9946 trunc = frac = *value;
9947 break;
9948 case rvc_inf:
9949 /* For +-Inf, return (*arg1 = arg0, +-0). */
9950 frac = dconst0;
9951 frac.sign = value->sign;
9952 trunc = *value;
9953 break;
9954 case rvc_normal:
9955 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9956 real_trunc (&trunc, VOIDmode, value);
9957 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9958 /* If the original number was negative and already
9959 integral, then the fractional part is -0.0. */
9960 if (value->sign && frac.cl == rvc_zero)
9961 frac.sign = value->sign;
9962 break;
9963 }
9964
9965 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9966 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9967 build_real (rettype, trunc));
9968 TREE_SIDE_EFFECTS (arg1) = 1;
9969 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9970 build_real (rettype, frac));
9971 }
9972
9973 return NULL_TREE;
9974 }
9975
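/* A worked example of the modf fold (IP a hypothetical double):

     modf (2.5, &ip)    =>   (ip = 2.0, 0.5)
     modf (-2.0, &ip)   =>   (ip = -2.0, -0.0)

   the second case showing the sign rule above for a negative value
   that is already integral.  */
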
9976 /* Given a location LOC, an interclass builtin function decl FNDECL
9977 and its single argument ARG, return a folded expression computing
9978 the same, or NULL_TREE if we either couldn't or didn't want to fold
9979 (the latter happens if there's an RTL instruction available). */
9980
9981 static tree
9982 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9983 {
9984 enum machine_mode mode;
9985
9986 if (!validate_arg (arg, REAL_TYPE))
9987 return NULL_TREE;
9988
9989 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9990 return NULL_TREE;
9991
9992 mode = TYPE_MODE (TREE_TYPE (arg));
9993
9994 /* If there is no optab, try generic code. */
9995 switch (DECL_FUNCTION_CODE (fndecl))
9996 {
9997 tree result;
9998
9999 CASE_FLT_FN (BUILT_IN_ISINF):
10000 {
10001 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
10002 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
10003 tree const type = TREE_TYPE (arg);
10004 REAL_VALUE_TYPE r;
10005 char buf[128];
10006
10007 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
10008 real_from_string (&r, buf);
10009 result = build_call_expr (isgr_fn, 2,
10010 fold_build1_loc (loc, ABS_EXPR, type, arg),
10011 build_real (type, r));
10012 return result;
10013 }
10014 CASE_FLT_FN (BUILT_IN_FINITE):
10015 case BUILT_IN_ISFINITE:
10016 {
10017 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
10018 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
10019 tree const type = TREE_TYPE (arg);
10020 REAL_VALUE_TYPE r;
10021 char buf[128];
10022
10023 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
10024 real_from_string (&r, buf);
10025 result = build_call_expr (isle_fn, 2,
10026 fold_build1_loc (loc, ABS_EXPR, type, arg),
10027 build_real (type, r));
10028 /*result = fold_build2_loc (loc, UNGT_EXPR,
10029 TREE_TYPE (TREE_TYPE (fndecl)),
10030 fold_build1_loc (loc, ABS_EXPR, type, arg),
10031 build_real (type, r));
10032 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
10033 TREE_TYPE (TREE_TYPE (fndecl)),
10034 result);*/
10035 return result;
10036 }
10037 case BUILT_IN_ISNORMAL:
10038 {
10039 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
10040 islessequal(fabs(x),DBL_MAX). */
10041 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
10042 tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
10043 tree const type = TREE_TYPE (arg);
10044 REAL_VALUE_TYPE rmax, rmin;
10045 char buf[128];
10046
10047 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
10048 real_from_string (&rmax, buf);
10049 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
10050 real_from_string (&rmin, buf);
10051 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
10052 result = build_call_expr (isle_fn, 2, arg,
10053 build_real (type, rmax));
10054 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
10055 build_call_expr (isge_fn, 2, arg,
10056 build_real (type, rmin)));
10057 return result;
10058 }
10059 default:
10060 break;
10061 }
10062
10063 return NULL_TREE;
10064 }
10065
10066 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
10067 ARG is the argument for the call. */
10068
10069 static tree
10070 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
10071 {
10072 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10073 REAL_VALUE_TYPE r;
10074
10075 if (!validate_arg (arg, REAL_TYPE))
10076 return NULL_TREE;
10077
10078 switch (builtin_index)
10079 {
10080 case BUILT_IN_ISINF:
10081 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
10082 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
10083
10084 if (TREE_CODE (arg) == REAL_CST)
10085 {
10086 r = TREE_REAL_CST (arg);
10087 if (real_isinf (&r))
10088 return real_compare (GT_EXPR, &r, &dconst0)
10089 ? integer_one_node : integer_minus_one_node;
10090 else
10091 return integer_zero_node;
10092 }
10093
10094 return NULL_TREE;
10095
10096 case BUILT_IN_ISINF_SIGN:
10097 {
10098 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
10099 /* In a boolean context, GCC will fold the inner COND_EXPR to
10100 1. So e.g. "if (isinf_sign(x))" would be folded to just
10101 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
10102 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
10103 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
10104 tree tmp = NULL_TREE;
10105
10106 arg = builtin_save_expr (arg);
10107
10108 if (signbit_fn && isinf_fn)
10109 {
10110 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
10111 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
10112
10113 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
10114 signbit_call, integer_zero_node);
10115 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
10116 isinf_call, integer_zero_node);
10117
10118 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
10119 integer_minus_one_node, integer_one_node);
10120 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
10121 isinf_call, tmp,
10122 integer_zero_node);
10123 }
10124
10125 return tmp;
10126 }
10127
10128 case BUILT_IN_ISFINITE:
10129 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
10130 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
10131 return omit_one_operand_loc (loc, type, integer_one_node, arg);
10132
10133 if (TREE_CODE (arg) == REAL_CST)
10134 {
10135 r = TREE_REAL_CST (arg);
10136 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
10137 }
10138
10139 return NULL_TREE;
10140
10141 case BUILT_IN_ISNAN:
10142 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
10143 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
10144
10145 if (TREE_CODE (arg) == REAL_CST)
10146 {
10147 r = TREE_REAL_CST (arg);
10148 return real_isnan (&r) ? integer_one_node : integer_zero_node;
10149 }
10150
10151 arg = builtin_save_expr (arg);
10152 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
10153
10154 default:
10155 gcc_unreachable ();
10156 }
10157 }
10158
10159 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
10160 This builtin will generate code to return the appropriate floating
10161 point classification depending on the value of the floating point
10162 number passed in. The possible return values must be supplied as
10163 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
10164 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
10165 one floating point argument, which is "type generic". */
10166
10167 static tree
10168 fold_builtin_fpclassify (location_t loc, tree exp)
10169 {
10170 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
10171 arg, type, res, tmp;
10172 enum machine_mode mode;
10173 REAL_VALUE_TYPE r;
10174 char buf[128];
10175
10176 /* Verify the required arguments in the original call. */
10177 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
10178 INTEGER_TYPE, INTEGER_TYPE,
10179 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
10180 return NULL_TREE;
10181
10182 fp_nan = CALL_EXPR_ARG (exp, 0);
10183 fp_infinite = CALL_EXPR_ARG (exp, 1);
10184 fp_normal = CALL_EXPR_ARG (exp, 2);
10185 fp_subnormal = CALL_EXPR_ARG (exp, 3);
10186 fp_zero = CALL_EXPR_ARG (exp, 4);
10187 arg = CALL_EXPR_ARG (exp, 5);
10188 type = TREE_TYPE (arg);
10189 mode = TYPE_MODE (type);
10190 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
10191
10192 /* fpclassify(x) ->
10193 isnan(x) ? FP_NAN :
10194 (fabs(x) == Inf ? FP_INFINITE :
10195 (fabs(x) >= DBL_MIN ? FP_NORMAL :
10196 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
10197
10198 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
10199 build_real (type, dconst0));
10200 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
10201 tmp, fp_zero, fp_subnormal);
10202
10203 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
10204 real_from_string (&r, buf);
10205 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
10206 arg, build_real (type, r));
10207 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
10208
10209 if (HONOR_INFINITIES (mode))
10210 {
10211 real_inf (&r);
10212 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
10213 build_real (type, r));
10214 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
10215 fp_infinite, res);
10216 }
10217
10218 if (HONOR_NANS (mode))
10219 {
10220 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
10221 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
10222 }
10223
10224 return res;
10225 }
10226
10227 /* Fold a call to an unordered comparison function such as
10228 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
10229 being called and ARG0 and ARG1 are the arguments for the call.
10230 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
10231 the opposite of the desired result. UNORDERED_CODE is used
10232 for modes that can hold NaNs and ORDERED_CODE is used for
10233 the rest. */
10234
10235 static tree
10236 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
10237 enum tree_code unordered_code,
10238 enum tree_code ordered_code)
10239 {
10240 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10241 enum tree_code code;
10242 tree type0, type1;
10243 enum tree_code code0, code1;
10244 tree cmp_type = NULL_TREE;
10245
10246 type0 = TREE_TYPE (arg0);
10247 type1 = TREE_TYPE (arg1);
10248
10249 code0 = TREE_CODE (type0);
10250 code1 = TREE_CODE (type1);
10251
10252 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
10253 /* Choose the wider of two real types. */
10254 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
10255 ? type0 : type1;
10256 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
10257 cmp_type = type0;
10258 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
10259 cmp_type = type1;
10260
10261 arg0 = fold_convert_loc (loc, cmp_type, arg0);
10262 arg1 = fold_convert_loc (loc, cmp_type, arg1);
10263
10264 if (unordered_code == UNORDERED_EXPR)
10265 {
10266 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
10267 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
10268 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
10269 }
10270
10271 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
10272 : ordered_code;
10273 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
10274 fold_build2_loc (loc, code, type, arg0, arg1));
10275 }
10276
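/* An illustrative sketch, assuming the code pairing the callers are
   expected to pass (e.g. UNLE_EXPR/LE_EXPR for isgreater):

     isgreater (x, y)   =>   !(x unle y)   when NaNs are honored
     isgreater (x, y)   =>   !(x <= y)     with -ffinite-math-only

   The unordered form keeps the comparison quiet, raising no invalid
   exception on quiet NaN operands.  */
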
10277 /* Fold a call to built-in function FNDECL with 0 arguments.
10278 IGNORE is true if the result of the function call is ignored. This
10279 function returns NULL_TREE if no simplification was possible. */
10280
10281 static tree
10282 fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
10283 {
10284 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10285 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10286 switch (fcode)
10287 {
10288 CASE_FLT_FN (BUILT_IN_INF):
10289 case BUILT_IN_INFD32:
10290 case BUILT_IN_INFD64:
10291 case BUILT_IN_INFD128:
10292 return fold_builtin_inf (loc, type, true);
10293
10294 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
10295 return fold_builtin_inf (loc, type, false);
10296
10297 case BUILT_IN_CLASSIFY_TYPE:
10298 return fold_builtin_classify_type (NULL_TREE);
10299
10300 case BUILT_IN_UNREACHABLE:
10301 if (flag_sanitize & SANITIZE_UNREACHABLE
10302 && (current_function_decl == NULL
10303 || !lookup_attribute ("no_sanitize_undefined",
10304 DECL_ATTRIBUTES (current_function_decl))))
10305 return ubsan_instrument_unreachable (loc);
10306 break;
10307
10308 default:
10309 break;
10310 }
10311 return NULL_TREE;
10312 }
10313
10314 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
10315 IGNORE is true if the result of the function call is ignored. This
10316 function returns NULL_TREE if no simplification was possible. */
10317
10318 static tree
10319 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
10320 {
10321 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10322 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10323 switch (fcode)
10324 {
10325 case BUILT_IN_CONSTANT_P:
10326 {
10327 tree val = fold_builtin_constant_p (arg0);
10328
10329 /* Gimplification will pull the CALL_EXPR for the builtin out of
10330 an if condition. When not optimizing, we'll not CSE it back.
10331 To avoid regressions such as link errors, return false now. */
10332 if (!val && !optimize)
10333 val = integer_zero_node;
10334
10335 return val;
10336 }
10337
10338 case BUILT_IN_CLASSIFY_TYPE:
10339 return fold_builtin_classify_type (arg0);
10340
10341 case BUILT_IN_STRLEN:
10342 return fold_builtin_strlen (loc, type, arg0);
10343
10344 CASE_FLT_FN (BUILT_IN_FABS):
10345 case BUILT_IN_FABSD32:
10346 case BUILT_IN_FABSD64:
10347 case BUILT_IN_FABSD128:
10348 return fold_builtin_fabs (loc, arg0, type);
10349
10350 case BUILT_IN_ABS:
10351 case BUILT_IN_LABS:
10352 case BUILT_IN_LLABS:
10353 case BUILT_IN_IMAXABS:
10354 return fold_builtin_abs (loc, arg0, type);
10355
10356 CASE_FLT_FN (BUILT_IN_CONJ):
10357 if (validate_arg (arg0, COMPLEX_TYPE)
10358 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10359 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
10360 break;
10361
10362 CASE_FLT_FN (BUILT_IN_CREAL):
10363 if (validate_arg (arg0, COMPLEX_TYPE)
10364 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10365 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
10366 break;
10367
10368 CASE_FLT_FN (BUILT_IN_CIMAG):
10369 if (validate_arg (arg0, COMPLEX_TYPE)
10370 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10371 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
10372 break;
10373
10374 CASE_FLT_FN (BUILT_IN_CCOS):
10375 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ false);
10376
10377 CASE_FLT_FN (BUILT_IN_CCOSH):
10378 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ true);
10379
10380 CASE_FLT_FN (BUILT_IN_CPROJ):
10381 return fold_builtin_cproj (loc, arg0, type);
10382
10383 CASE_FLT_FN (BUILT_IN_CSIN):
10384 if (validate_arg (arg0, COMPLEX_TYPE)
10385 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10386 return do_mpc_arg1 (arg0, type, mpc_sin);
10387 break;
10388
10389 CASE_FLT_FN (BUILT_IN_CSINH):
10390 if (validate_arg (arg0, COMPLEX_TYPE)
10391 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10392 return do_mpc_arg1 (arg0, type, mpc_sinh);
10393 break;
10394
10395 CASE_FLT_FN (BUILT_IN_CTAN):
10396 if (validate_arg (arg0, COMPLEX_TYPE)
10397 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10398 return do_mpc_arg1 (arg0, type, mpc_tan);
10399 break;
10400
10401 CASE_FLT_FN (BUILT_IN_CTANH):
10402 if (validate_arg (arg0, COMPLEX_TYPE)
10403 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10404 return do_mpc_arg1 (arg0, type, mpc_tanh);
10405 break;
10406
10407 CASE_FLT_FN (BUILT_IN_CLOG):
10408 if (validate_arg (arg0, COMPLEX_TYPE)
10409 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10410 return do_mpc_arg1 (arg0, type, mpc_log);
10411 break;
10412
10413 CASE_FLT_FN (BUILT_IN_CSQRT):
10414 if (validate_arg (arg0, COMPLEX_TYPE)
10415 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10416 return do_mpc_arg1 (arg0, type, mpc_sqrt);
10417 break;
10418
10419 CASE_FLT_FN (BUILT_IN_CASIN):
10420 if (validate_arg (arg0, COMPLEX_TYPE)
10421 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10422 return do_mpc_arg1 (arg0, type, mpc_asin);
10423 break;
10424
10425 CASE_FLT_FN (BUILT_IN_CACOS):
10426 if (validate_arg (arg0, COMPLEX_TYPE)
10427 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10428 return do_mpc_arg1 (arg0, type, mpc_acos);
10429 break;
10430
10431 CASE_FLT_FN (BUILT_IN_CATAN):
10432 if (validate_arg (arg0, COMPLEX_TYPE)
10433 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10434 return do_mpc_arg1 (arg0, type, mpc_atan);
10435 break;
10436
10437 CASE_FLT_FN (BUILT_IN_CASINH):
10438 if (validate_arg (arg0, COMPLEX_TYPE)
10439 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10440 return do_mpc_arg1 (arg0, type, mpc_asinh);
10441 break;
10442
10443 CASE_FLT_FN (BUILT_IN_CACOSH):
10444 if (validate_arg (arg0, COMPLEX_TYPE)
10445 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10446 return do_mpc_arg1 (arg0, type, mpc_acosh);
10447 break;
10448
10449 CASE_FLT_FN (BUILT_IN_CATANH):
10450 if (validate_arg (arg0, COMPLEX_TYPE)
10451 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10452 return do_mpc_arg1 (arg0, type, mpc_atanh);
10453 break;
10454
10455 CASE_FLT_FN (BUILT_IN_CABS):
10456 return fold_builtin_cabs (loc, arg0, type, fndecl);
10457
10458 CASE_FLT_FN (BUILT_IN_CARG):
10459 return fold_builtin_carg (loc, arg0, type);
10460
10461 CASE_FLT_FN (BUILT_IN_SQRT):
10462 return fold_builtin_sqrt (loc, arg0, type);
10463
10464 CASE_FLT_FN (BUILT_IN_CBRT):
10465 return fold_builtin_cbrt (loc, arg0, type);
10466
10467 CASE_FLT_FN (BUILT_IN_ASIN):
10468 if (validate_arg (arg0, REAL_TYPE))
10469 return do_mpfr_arg1 (arg0, type, mpfr_asin,
10470 &dconstm1, &dconst1, true);
10471 break;
10472
10473 CASE_FLT_FN (BUILT_IN_ACOS):
10474 if (validate_arg (arg0, REAL_TYPE))
10475 return do_mpfr_arg1 (arg0, type, mpfr_acos,
10476 &dconstm1, &dconst1, true);
10477 break;
10478
10479 CASE_FLT_FN (BUILT_IN_ATAN):
10480 if (validate_arg (arg0, REAL_TYPE))
10481 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
10482 break;
10483
10484 CASE_FLT_FN (BUILT_IN_ASINH):
10485 if (validate_arg (arg0, REAL_TYPE))
10486 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10487 break;
10488
10489 CASE_FLT_FN (BUILT_IN_ACOSH):
10490 if (validate_arg (arg0, REAL_TYPE))
10491 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10492 &dconst1, NULL, true);
10493 break;
10494
10495 CASE_FLT_FN (BUILT_IN_ATANH):
10496 if (validate_arg (arg0, REAL_TYPE))
10497 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10498 &dconstm1, &dconst1, false);
10499 break;
10500
10501 CASE_FLT_FN (BUILT_IN_SIN):
10502 if (validate_arg (arg0, REAL_TYPE))
10503 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10504 break;
10505
10506 CASE_FLT_FN (BUILT_IN_COS):
10507 return fold_builtin_cos (loc, arg0, type, fndecl);
10508
10509 CASE_FLT_FN (BUILT_IN_TAN):
10510 return fold_builtin_tan (arg0, type);
10511
10512 CASE_FLT_FN (BUILT_IN_CEXP):
10513 return fold_builtin_cexp (loc, arg0, type);
10514
10515 CASE_FLT_FN (BUILT_IN_CEXPI):
10516 if (validate_arg (arg0, REAL_TYPE))
10517 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10518 break;
10519
10520 CASE_FLT_FN (BUILT_IN_SINH):
10521 if (validate_arg (arg0, REAL_TYPE))
10522 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10523 break;
10524
10525 CASE_FLT_FN (BUILT_IN_COSH):
10526 return fold_builtin_cosh (loc, arg0, type, fndecl);
10527
10528 CASE_FLT_FN (BUILT_IN_TANH):
10529 if (validate_arg (arg0, REAL_TYPE))
10530 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10531 break;
10532
10533 CASE_FLT_FN (BUILT_IN_ERF):
10534 if (validate_arg (arg0, REAL_TYPE))
10535 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10536 break;
10537
10538 CASE_FLT_FN (BUILT_IN_ERFC):
10539 if (validate_arg (arg0, REAL_TYPE))
10540 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10541 break;
10542
10543 CASE_FLT_FN (BUILT_IN_TGAMMA):
10544 if (validate_arg (arg0, REAL_TYPE))
10545 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10546 break;
10547
10548 CASE_FLT_FN (BUILT_IN_EXP):
10549 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
10550
10551 CASE_FLT_FN (BUILT_IN_EXP2):
10552 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
10553
10554 CASE_FLT_FN (BUILT_IN_EXP10):
10555 CASE_FLT_FN (BUILT_IN_POW10):
10556 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
10557
10558 CASE_FLT_FN (BUILT_IN_EXPM1):
10559 if (validate_arg (arg0, REAL_TYPE))
10560 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10561 break;
10562
10563 CASE_FLT_FN (BUILT_IN_LOG):
10564 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
10565
10566 CASE_FLT_FN (BUILT_IN_LOG2):
10567 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
10568
10569 CASE_FLT_FN (BUILT_IN_LOG10):
10570 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
10571
10572 CASE_FLT_FN (BUILT_IN_LOG1P):
10573 if (validate_arg (arg0, REAL_TYPE))
10574 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10575 &dconstm1, NULL, false);
10576 break;
10577
10578 CASE_FLT_FN (BUILT_IN_J0):
10579 if (validate_arg (arg0, REAL_TYPE))
10580 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10581 NULL, NULL, 0);
10582 break;
10583
10584 CASE_FLT_FN (BUILT_IN_J1):
10585 if (validate_arg (arg0, REAL_TYPE))
10586 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10587 NULL, NULL, 0);
10588 break;
10589
10590 CASE_FLT_FN (BUILT_IN_Y0):
10591 if (validate_arg (arg0, REAL_TYPE))
10592 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10593 &dconst0, NULL, false);
10594 break;
10595
10596 CASE_FLT_FN (BUILT_IN_Y1):
10597 if (validate_arg (arg0, REAL_TYPE))
10598 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10599 &dconst0, NULL, false);
10600 break;
10601
10602 CASE_FLT_FN (BUILT_IN_NAN):
10603 case BUILT_IN_NAND32:
10604 case BUILT_IN_NAND64:
10605 case BUILT_IN_NAND128:
10606 return fold_builtin_nan (arg0, type, true);
10607
10608 CASE_FLT_FN (BUILT_IN_NANS):
10609 return fold_builtin_nan (arg0, type, false);
10610
10611 CASE_FLT_FN (BUILT_IN_FLOOR):
10612 return fold_builtin_floor (loc, fndecl, arg0);
10613
10614 CASE_FLT_FN (BUILT_IN_CEIL):
10615 return fold_builtin_ceil (loc, fndecl, arg0);
10616
10617 CASE_FLT_FN (BUILT_IN_TRUNC):
10618 return fold_builtin_trunc (loc, fndecl, arg0);
10619
10620 CASE_FLT_FN (BUILT_IN_ROUND):
10621 return fold_builtin_round (loc, fndecl, arg0);
10622
10623 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10624 CASE_FLT_FN (BUILT_IN_RINT):
10625 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
10626
10627 CASE_FLT_FN (BUILT_IN_ICEIL):
10628 CASE_FLT_FN (BUILT_IN_LCEIL):
10629 CASE_FLT_FN (BUILT_IN_LLCEIL):
10630 CASE_FLT_FN (BUILT_IN_LFLOOR):
10631 CASE_FLT_FN (BUILT_IN_IFLOOR):
10632 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10633 CASE_FLT_FN (BUILT_IN_IROUND):
10634 CASE_FLT_FN (BUILT_IN_LROUND):
10635 CASE_FLT_FN (BUILT_IN_LLROUND):
10636 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
10637
10638 CASE_FLT_FN (BUILT_IN_IRINT):
10639 CASE_FLT_FN (BUILT_IN_LRINT):
10640 CASE_FLT_FN (BUILT_IN_LLRINT):
10641 return fold_fixed_mathfn (loc, fndecl, arg0);
10642
10643 case BUILT_IN_BSWAP16:
10644 case BUILT_IN_BSWAP32:
10645 case BUILT_IN_BSWAP64:
10646 return fold_builtin_bswap (fndecl, arg0);
10647
10648 CASE_INT_FN (BUILT_IN_FFS):
10649 CASE_INT_FN (BUILT_IN_CLZ):
10650 CASE_INT_FN (BUILT_IN_CTZ):
10651 CASE_INT_FN (BUILT_IN_CLRSB):
10652 CASE_INT_FN (BUILT_IN_POPCOUNT):
10653 CASE_INT_FN (BUILT_IN_PARITY):
10654 return fold_builtin_bitop (fndecl, arg0);
10655
10656 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10657 return fold_builtin_signbit (loc, arg0, type);
10658
10659 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10660 return fold_builtin_significand (loc, arg0, type);
10661
10662 CASE_FLT_FN (BUILT_IN_ILOGB):
10663 CASE_FLT_FN (BUILT_IN_LOGB):
10664 return fold_builtin_logb (loc, arg0, type);
10665
10666 case BUILT_IN_ISASCII:
10667 return fold_builtin_isascii (loc, arg0);
10668
10669 case BUILT_IN_TOASCII:
10670 return fold_builtin_toascii (loc, arg0);
10671
10672 case BUILT_IN_ISDIGIT:
10673 return fold_builtin_isdigit (loc, arg0);
10674
10675 CASE_FLT_FN (BUILT_IN_FINITE):
10676 case BUILT_IN_FINITED32:
10677 case BUILT_IN_FINITED64:
10678 case BUILT_IN_FINITED128:
10679 case BUILT_IN_ISFINITE:
10680 {
10681 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10682 if (ret)
10683 return ret;
10684 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10685 }
10686
10687 CASE_FLT_FN (BUILT_IN_ISINF):
10688 case BUILT_IN_ISINFD32:
10689 case BUILT_IN_ISINFD64:
10690 case BUILT_IN_ISINFD128:
10691 {
10692 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10693 if (ret)
10694 return ret;
10695 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10696 }
10697
10698 case BUILT_IN_ISNORMAL:
10699 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10700
10701 case BUILT_IN_ISINF_SIGN:
10702 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10703
10704 CASE_FLT_FN (BUILT_IN_ISNAN):
10705 case BUILT_IN_ISNAND32:
10706 case BUILT_IN_ISNAND64:
10707 case BUILT_IN_ISNAND128:
10708 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10709
10710 case BUILT_IN_PRINTF:
10711 case BUILT_IN_PRINTF_UNLOCKED:
10712 case BUILT_IN_VPRINTF:
10713 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
10714
10715 case BUILT_IN_FREE:
10716 if (integer_zerop (arg0))
10717 return build_empty_stmt (loc);
10718 break;
10719
10720 default:
10721 break;
10722 }
10723
10724 return NULL_TREE;
10725
10726 }
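/* Editorial illustration (not GCC code): typical effects of the
   one-argument foldings above, assuming the argument validates:

       __builtin_fabs (-3.5)    ->  3.5
       __builtin_strlen ("abc") ->  (size_t) 3
       free ((void *) 0)        ->  an empty statement

   When validation fails, or an MPFR/MPC evaluation cannot produce a
   usable exact result, NULL_TREE is returned and the call is left for
   normal expansion.  */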
10727
10728 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10729 IGNORE is true if the result of the function call is ignored. This
10730 function returns NULL_TREE if no simplification was possible. */
10731
10732 static tree
10733 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
10734 {
10735 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10736 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10737
10738 switch (fcode)
10739 {
10740 CASE_FLT_FN (BUILT_IN_JN):
10741 if (validate_arg (arg0, INTEGER_TYPE)
10742 && validate_arg (arg1, REAL_TYPE))
10743 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10744 break;
10745
10746 CASE_FLT_FN (BUILT_IN_YN):
10747 if (validate_arg (arg0, INTEGER_TYPE)
10748 && validate_arg (arg1, REAL_TYPE))
10749 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10750 &dconst0, false);
10751 break;
10752
10753 CASE_FLT_FN (BUILT_IN_DREM):
10754 CASE_FLT_FN (BUILT_IN_REMAINDER):
10755 if (validate_arg (arg0, REAL_TYPE)
10756 && validate_arg (arg1, REAL_TYPE))
10757 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10758 break;
10759
10760 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10761 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10762 if (validate_arg (arg0, REAL_TYPE)
10763 && validate_arg (arg1, POINTER_TYPE))
10764 return do_mpfr_lgamma_r (arg0, arg1, type);
10765 break;
10766
10767 CASE_FLT_FN (BUILT_IN_ATAN2):
10768 if (validate_arg (arg0, REAL_TYPE)
10769 && validate_arg (arg1, REAL_TYPE))
10770 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10771 break;
10772
10773 CASE_FLT_FN (BUILT_IN_FDIM):
10774 if (validate_arg (arg0, REAL_TYPE)
10775 && validate_arg (arg1, REAL_TYPE))
10776 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10777 break;
10778
10779 CASE_FLT_FN (BUILT_IN_HYPOT):
10780 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10781
10782 CASE_FLT_FN (BUILT_IN_CPOW):
10783 if (validate_arg (arg0, COMPLEX_TYPE)
10784 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10785 && validate_arg (arg1, COMPLEX_TYPE)
10786 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10787 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10788 break;
10789
10790 CASE_FLT_FN (BUILT_IN_LDEXP):
10791 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10792 CASE_FLT_FN (BUILT_IN_SCALBN):
10793 CASE_FLT_FN (BUILT_IN_SCALBLN):
10794 return fold_builtin_load_exponent (loc, arg0, arg1,
10795 type, /*ldexp=*/false);
10796
10797 CASE_FLT_FN (BUILT_IN_FREXP):
10798 return fold_builtin_frexp (loc, arg0, arg1, type);
10799
10800 CASE_FLT_FN (BUILT_IN_MODF):
10801 return fold_builtin_modf (loc, arg0, arg1, type);
10802
10803 case BUILT_IN_BZERO:
10804 return fold_builtin_bzero (loc, arg0, arg1, ignore);
10805
10806 case BUILT_IN_FPUTS:
10807 return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);
10808
10809 case BUILT_IN_FPUTS_UNLOCKED:
10810 return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);
10811
10812 case BUILT_IN_STRSTR:
10813 return fold_builtin_strstr (loc, arg0, arg1, type);
10814
10815 case BUILT_IN_STRCAT:
10816 return fold_builtin_strcat (loc, arg0, arg1, NULL_TREE);
10817
10818 case BUILT_IN_STRSPN:
10819 return fold_builtin_strspn (loc, arg0, arg1);
10820
10821 case BUILT_IN_STRCSPN:
10822 return fold_builtin_strcspn (loc, arg0, arg1);
10823
10824 case BUILT_IN_STRCHR:
10825 case BUILT_IN_INDEX:
10826 return fold_builtin_strchr (loc, arg0, arg1, type);
10827
10828 case BUILT_IN_STRRCHR:
10829 case BUILT_IN_RINDEX:
10830 return fold_builtin_strrchr (loc, arg0, arg1, type);
10831
10832 case BUILT_IN_STRCPY:
10833 return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);
10834
10835 case BUILT_IN_STPCPY:
10836 if (ignore)
10837 {
10838 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
10839 if (!fn)
10840 break;
10841
10842 return build_call_expr_loc (loc, fn, 2, arg0, arg1);
10843 }
10844 else
10845 return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
10846 break;
10847
10848 case BUILT_IN_STRCMP:
10849 return fold_builtin_strcmp (loc, arg0, arg1);
10850
10851 case BUILT_IN_STRPBRK:
10852 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10853
10854 case BUILT_IN_EXPECT:
10855 return fold_builtin_expect (loc, arg0, arg1);
10856
10857 CASE_FLT_FN (BUILT_IN_POW):
10858 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10859
10860 CASE_FLT_FN (BUILT_IN_POWI):
10861 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10862
10863 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10864 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10865
10866 CASE_FLT_FN (BUILT_IN_FMIN):
10867 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10868
10869 CASE_FLT_FN (BUILT_IN_FMAX):
10870 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10871
10872 case BUILT_IN_ISGREATER:
10873 return fold_builtin_unordered_cmp (loc, fndecl,
10874 arg0, arg1, UNLE_EXPR, LE_EXPR);
10875 case BUILT_IN_ISGREATEREQUAL:
10876 return fold_builtin_unordered_cmp (loc, fndecl,
10877 arg0, arg1, UNLT_EXPR, LT_EXPR);
10878 case BUILT_IN_ISLESS:
10879 return fold_builtin_unordered_cmp (loc, fndecl,
10880 arg0, arg1, UNGE_EXPR, GE_EXPR);
10881 case BUILT_IN_ISLESSEQUAL:
10882 return fold_builtin_unordered_cmp (loc, fndecl,
10883 arg0, arg1, UNGT_EXPR, GT_EXPR);
10884 case BUILT_IN_ISLESSGREATER:
10885 return fold_builtin_unordered_cmp (loc, fndecl,
10886 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10887 case BUILT_IN_ISUNORDERED:
10888 return fold_builtin_unordered_cmp (loc, fndecl,
10889 arg0, arg1, UNORDERED_EXPR,
10890 NOP_EXPR);
10891
10892 /* We do the folding for va_start in the expander. */
10893 case BUILT_IN_VA_START:
10894 break;
10895
10896 case BUILT_IN_SPRINTF:
10897 return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);
10898
10899 case BUILT_IN_OBJECT_SIZE:
10900 return fold_builtin_object_size (arg0, arg1);
10901
10902 case BUILT_IN_PRINTF:
10903 case BUILT_IN_PRINTF_UNLOCKED:
10904 case BUILT_IN_VPRINTF:
10905 return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);
10906
10907 case BUILT_IN_PRINTF_CHK:
10908 case BUILT_IN_VPRINTF_CHK:
10909 if (!validate_arg (arg0, INTEGER_TYPE)
10910 || TREE_SIDE_EFFECTS (arg0))
10911 return NULL_TREE;
10912 else
10913 return fold_builtin_printf (loc, fndecl,
10914 arg1, NULL_TREE, ignore, fcode);
10915 break;
10916
10917 case BUILT_IN_FPRINTF:
10918 case BUILT_IN_FPRINTF_UNLOCKED:
10919 case BUILT_IN_VFPRINTF:
10920 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
10921 ignore, fcode);
10922
10923 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10924 return fold_builtin_atomic_always_lock_free (arg0, arg1);
10925
10926 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10927 return fold_builtin_atomic_is_lock_free (arg0, arg1);
10928
10929 default:
10930 break;
10931 }
10932 return NULL_TREE;
10933 }
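/* Editorial illustration (not GCC code): typical two-argument foldings
   from the switch above:

       pow (x, 2.0)                 ->  x * x        (fold_builtin_pow)
       stpcpy (d, s), value unused  ->  strcpy (d, s)
       isgreater (a, b)             ->  !(a UNLE b)  via UNLE_EXPR

   The isgreater-family comparisons are folded through the quiet
   "unordered or ..." tree codes so that no spurious floating-point
   exceptions are raised on NaN operands.  */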
10934
10935 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10936 and ARG2. IGNORE is true if the result of the function call is ignored.
10937 This function returns NULL_TREE if no simplification was possible. */
10938
10939 static tree
10940 fold_builtin_3 (location_t loc, tree fndecl,
10941 tree arg0, tree arg1, tree arg2, bool ignore)
10942 {
10943 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10944 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10945 switch (fcode)
10946 {
10947
10948 CASE_FLT_FN (BUILT_IN_SINCOS):
10949 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10950
10951 CASE_FLT_FN (BUILT_IN_FMA):
10952 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
10954
10955 CASE_FLT_FN (BUILT_IN_REMQUO):
10956 if (validate_arg (arg0, REAL_TYPE)
10957 && validate_arg (arg1, REAL_TYPE)
10958 && validate_arg (arg2, POINTER_TYPE))
10959 return do_mpfr_remquo (arg0, arg1, arg2);
10960 break;
10961
10962 case BUILT_IN_MEMSET:
10963 return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);
10964
10965 case BUILT_IN_BCOPY:
10966 return fold_builtin_memory_op (loc, arg1, arg0, arg2,
10967 void_type_node, true, /*endp=*/3);
10968
10969 case BUILT_IN_MEMCPY:
10970 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10971 type, ignore, /*endp=*/0);
10972
10973 case BUILT_IN_MEMPCPY:
10974 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10975 type, ignore, /*endp=*/1);
10976
10977 case BUILT_IN_MEMMOVE:
10978 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10979 type, ignore, /*endp=*/3);
10980
10981 case BUILT_IN_STRNCAT:
10982 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10983
10984 case BUILT_IN_STRNCPY:
10985 return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);
10986
10987 case BUILT_IN_STRNCMP:
10988 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10989
10990 case BUILT_IN_MEMCHR:
10991 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10992
10993 case BUILT_IN_BCMP:
10994 case BUILT_IN_MEMCMP:
10995 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
10996
10997 case BUILT_IN_SPRINTF:
10998 return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);
10999
11000 case BUILT_IN_SNPRINTF:
11001 return fold_builtin_snprintf (loc, arg0, arg1, arg2, NULL_TREE, ignore);
11002
11003 case BUILT_IN_STRCPY_CHK:
11004 case BUILT_IN_STPCPY_CHK:
11005 return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
11006 ignore, fcode);
11007
11008 case BUILT_IN_STRCAT_CHK:
11009 return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
11010
11011 case BUILT_IN_PRINTF_CHK:
11012 case BUILT_IN_VPRINTF_CHK:
11013 if (!validate_arg (arg0, INTEGER_TYPE)
11014 || TREE_SIDE_EFFECTS (arg0))
11015 return NULL_TREE;
11016 else
11017 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
11018 break;
11019
11020 case BUILT_IN_FPRINTF:
11021 case BUILT_IN_FPRINTF_UNLOCKED:
11022 case BUILT_IN_VFPRINTF:
11023 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
11024 ignore, fcode);
11025
11026 case BUILT_IN_FPRINTF_CHK:
11027 case BUILT_IN_VFPRINTF_CHK:
11028 if (!validate_arg (arg1, INTEGER_TYPE)
11029 || TREE_SIDE_EFFECTS (arg1))
11030 return NULL_TREE;
11031 else
11032 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
11033 ignore, fcode);
11034
11035 default:
11036 break;
11037 }
11038 return NULL_TREE;
11039 }
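/* Editorial note on the ENDP convention used with fold_builtin_memory_op
   above: 0 folds with memcpy semantics (the result is DEST), 1 with
   mempcpy semantics (the result is DEST + LEN), and 3 with memmove
   semantics, where the two regions may overlap; bcopy is handled as
   memmove with its first two arguments swapped.  */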
11040
11041 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
11042 ARG2, and ARG3. IGNORE is true if the result of the function call is
11043 ignored. This function returns NULL_TREE if no simplification was
11044 possible. */
11045
11046 static tree
11047 fold_builtin_4 (location_t loc, tree fndecl,
11048 tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
11049 {
11050 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11051
11052 switch (fcode)
11053 {
11054 case BUILT_IN_MEMCPY_CHK:
11055 case BUILT_IN_MEMPCPY_CHK:
11056 case BUILT_IN_MEMMOVE_CHK:
11057 case BUILT_IN_MEMSET_CHK:
11058 return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
11059 NULL_TREE, ignore,
11060 DECL_FUNCTION_CODE (fndecl));
11061
11062 case BUILT_IN_STRNCPY_CHK:
11063 case BUILT_IN_STPNCPY_CHK:
11064 return fold_builtin_stxncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE,
11065 ignore, fcode);
11066
11067 case BUILT_IN_STRNCAT_CHK:
11068 return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);
11069
11070 case BUILT_IN_SNPRINTF:
11071 return fold_builtin_snprintf (loc, arg0, arg1, arg2, arg3, ignore);
11072
11073 case BUILT_IN_FPRINTF_CHK:
11074 case BUILT_IN_VFPRINTF_CHK:
11075 if (!validate_arg (arg1, INTEGER_TYPE)
11076 || TREE_SIDE_EFFECTS (arg1))
11077 return NULL_TREE;
11078 else
11079 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
11080 ignore, fcode);
11081 break;
11082
11083 default:
11084 break;
11085 }
11086 return NULL_TREE;
11087 }
11088
11089 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
11090 arguments, where NARGS <= 4. IGNORE is true if the result of the
11091 function call is ignored. This function returns NULL_TREE if no
11092 simplification was possible. Note that this only folds builtins with
11093 fixed argument patterns. Foldings that do varargs-to-varargs
11094 transformations, or that match calls with more than 4 arguments,
11095 need to be handled with fold_builtin_varargs instead. */
11096
11097 #define MAX_ARGS_TO_FOLD_BUILTIN 4
11098
11099 static tree
11100 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
11101 {
11102 tree ret = NULL_TREE;
11103
11104 switch (nargs)
11105 {
11106 case 0:
11107 ret = fold_builtin_0 (loc, fndecl, ignore);
11108 break;
11109 case 1:
11110 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
11111 break;
11112 case 2:
11113 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
11114 break;
11115 case 3:
11116 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
11117 break;
11118 case 4:
11119 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
11120 ignore);
11121 break;
11122 default:
11123 break;
11124 }
11125 if (ret)
11126 {
11127 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11128 SET_EXPR_LOCATION (ret, loc);
11129 TREE_NO_WARNING (ret) = 1;
11130 return ret;
11131 }
11132 return NULL_TREE;
11133 }
11134
11135 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
11136 list ARGS along with N new arguments in NEWARGS. SKIP is the number
11137 of arguments in ARGS to be omitted. OLDNARGS is the number of
11138 elements in ARGS. */
11139
11140 static tree
11141 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
11142 int skip, tree fndecl, int n, va_list newargs)
11143 {
11144 int nargs = oldnargs - skip + n;
11145 tree *buffer;
11146
11147 if (n > 0)
11148 {
11149 int i, j;
11150
11151 buffer = XALLOCAVEC (tree, nargs);
11152 for (i = 0; i < n; i++)
11153 buffer[i] = va_arg (newargs, tree);
11154 for (j = skip; j < oldnargs; j++, i++)
11155 buffer[i] = args[j];
11156 }
11157 else
11158 buffer = args + skip;
11159
11160 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
11161 }
11162
11163 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
11164 list ARGS along with N new arguments specified as the "..."
11165 parameters. SKIP is the number of arguments in ARGS to be omitted.
11166 OLDNARGS is the number of elements in ARGS. */
11167
11168 static tree
11169 rewrite_call_expr_array (location_t loc, int oldnargs, tree *args,
11170 int skip, tree fndecl, int n, ...)
11171 {
11172 va_list ap;
11173 tree t;
11174
11175 va_start (ap, n);
11176 t = rewrite_call_expr_valist (loc, oldnargs, args, skip, fndecl, n, ap);
11177 va_end (ap);
11178
11179 return t;
11180 }
11181
11182 /* Return true if FNDECL shouldn't be folded right now.
11183 If a built-in function has an always_inline wrapper, defer
11184 folding it until after always_inline functions have been
11185 inlined; otherwise e.g. -D_FORTIFY_SOURCE checking
11186 might not be performed. */
11187
11188 bool
11189 avoid_folding_inline_builtin (tree fndecl)
11190 {
11191 return (DECL_DECLARED_INLINE_P (fndecl)
11192 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
11193 && cfun
11194 && !cfun->always_inline_functions_inlined
11195 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
11196 }
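/* Editorial sketch of the situation this guards against: with
   -D_FORTIFY_SOURCE, a C library may wrap a builtin in an
   always_inline function along these lines (hypothetical
   declaration, not from any particular library header):

       extern __inline __attribute__ ((always_inline)) char *
       strcpy (char *d, const char *s)
       {
         return __builtin___strcpy_chk (d, s,
                                        __builtin_object_size (d, 1));
       }

   Folding a strcpy call before this wrapper has been inlined would
   bypass the object-size check, so folding is deferred.  */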
11197
11198 /* A wrapper function for builtin folding that prevents warnings for
11199 "statement without effect" and the like, caused by removing the
11200 call node earlier than the warning is generated. */
11201
11202 tree
11203 fold_call_expr (location_t loc, tree exp, bool ignore)
11204 {
11205 tree ret = NULL_TREE;
11206 tree fndecl = get_callee_fndecl (exp);
11207 if (fndecl
11208 && TREE_CODE (fndecl) == FUNCTION_DECL
11209 && DECL_BUILT_IN (fndecl)
11210 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
11211 yet. Defer folding until we see all the arguments
11212 (after inlining). */
11213 && !CALL_EXPR_VA_ARG_PACK (exp))
11214 {
11215 int nargs = call_expr_nargs (exp);
11216
11217 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
11218 instead the last argument is __builtin_va_arg_pack (). Defer folding
11219 even in that case, until arguments are finalized. */
11220 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
11221 {
11222 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
11223 if (fndecl2
11224 && TREE_CODE (fndecl2) == FUNCTION_DECL
11225 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
11226 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
11227 return NULL_TREE;
11228 }
11229
11230 if (avoid_folding_inline_builtin (fndecl))
11231 return NULL_TREE;
11232
11233 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11234 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
11235 CALL_EXPR_ARGP (exp), ignore);
11236 else
11237 {
11238 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
11239 {
11240 tree *args = CALL_EXPR_ARGP (exp);
11241 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
11242 }
11243 if (!ret)
11244 ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
11245 if (ret)
11246 return ret;
11247 }
11248 }
11249 return NULL_TREE;
11250 }
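/* Editorial example of the __builtin_va_arg_pack deferral above
   (hypothetical user code):

       static inline __attribute__ ((always_inline)) int
       log_printf (const char *fmt, ...)
       {
         return printf (fmt, __builtin_va_arg_pack ());
       }

   The inner printf call is not folded until log_printf itself has
   been inlined and the argument pack expanded into real arguments.  */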
11251
11252 /* Conveniently construct a function call expression. FNDECL names the
11253 function to be called and N arguments are passed in the array
11254 ARGARRAY. */
11255
11256 tree
11257 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
11258 {
11259 tree fntype = TREE_TYPE (fndecl);
11260 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11261
11262 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
11263 }
11264
11265 /* Conveniently construct a function call expression. FNDECL names the
11266 function to be called and the arguments are passed in the vector
11267 VEC. */
11268
11269 tree
11270 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
11271 {
11272 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
11273 vec_safe_address (vec));
11274 }
11275
11276
11277 /* Conveniently construct a function call expression. FNDECL names the
11278 function to be called, N is the number of arguments, and the "..."
11279 parameters are the argument expressions. */
11280
11281 tree
11282 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
11283 {
11284 va_list ap;
11285 tree *argarray = XALLOCAVEC (tree, n);
11286 int i;
11287
11288 va_start (ap, n);
11289 for (i = 0; i < n; i++)
11290 argarray[i] = va_arg (ap, tree);
11291 va_end (ap);
11292 return build_call_expr_loc_array (loc, fndecl, n, argarray);
11293 }
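/* Typical usage (sketch): build a located call to memcpy, as done by
   fold_builtin_strcat later in this file:

       tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
       if (fn)
         call = build_call_expr_loc (loc, fn, 3, dest, src, len);  */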
11294
11295 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
11296 varargs macros aren't supported by all bootstrap compilers. */
11297
11298 tree
11299 build_call_expr (tree fndecl, int n, ...)
11300 {
11301 va_list ap;
11302 tree *argarray = XALLOCAVEC (tree, n);
11303 int i;
11304
11305 va_start (ap, n);
11306 for (i = 0; i < n; i++)
11307 argarray[i] = va_arg (ap, tree);
11308 va_end (ap);
11309 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
11310 }
11311
11312 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
11313 N arguments are passed in the array ARGARRAY. */
11314
11315 tree
11316 fold_builtin_call_array (location_t loc, tree type,
11317 tree fn,
11318 int n,
11319 tree *argarray)
11320 {
11321 tree ret = NULL_TREE;
11322 tree exp;
11323
11324 if (TREE_CODE (fn) == ADDR_EXPR)
11325 {
11326 tree fndecl = TREE_OPERAND (fn, 0);
11327 if (TREE_CODE (fndecl) == FUNCTION_DECL
11328 && DECL_BUILT_IN (fndecl))
11329 {
11330 /* If the last argument is __builtin_va_arg_pack (), arguments to this
11331 function are not finalized yet. Defer folding until they are. */
11332 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
11333 {
11334 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
11335 if (fndecl2
11336 && TREE_CODE (fndecl2) == FUNCTION_DECL
11337 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
11338 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
11339 return build_call_array_loc (loc, type, fn, n, argarray);
11340 }
11341 if (avoid_folding_inline_builtin (fndecl))
11342 return build_call_array_loc (loc, type, fn, n, argarray);
11343 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11344 {
11345 ret = targetm.fold_builtin (fndecl, n, argarray, false);
11346 if (ret)
11347 return ret;
11348
11349 return build_call_array_loc (loc, type, fn, n, argarray);
11350 }
11351 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
11352 {
11353 /* First try the transformations that don't require consing up
11354 an exp. */
11355 ret = fold_builtin_n (loc, fndecl, argarray, n, false);
11356 if (ret)
11357 return ret;
11358 }
11359
11360 /* If we got this far, we need to build an exp. */
11361 exp = build_call_array_loc (loc, type, fn, n, argarray);
11362 ret = fold_builtin_varargs (loc, fndecl, exp, false);
11363 return ret ? ret : exp;
11364 }
11365 }
11366
11367 return build_call_array_loc (loc, type, fn, n, argarray);
11368 }
11369
11370 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
11371 along with N new arguments specified as the "..." parameters. SKIP
11372 is the number of arguments in EXP to be omitted. This function is used
11373 to do varargs-to-varargs transformations. */
11374
11375 static tree
11376 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
11377 {
11378 va_list ap;
11379 tree t;
11380
11381 va_start (ap, n);
11382 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
11383 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
11384 va_end (ap);
11385
11386 return t;
11387 }
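/* Usage sketch with hypothetical operands: given a call EXP with four
   arguments,

       rewrite_call_expr (loc, exp, 2, fndecl, 1, newarg);

   builds a call to FNDECL whose arguments are NEWARG followed by the
   last two arguments of EXP (the first two are skipped).  */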
11388
11389 /* Validate a single argument ARG against a tree code CODE representing
11390 a type. */
11391
11392 static bool
11393 validate_arg (const_tree arg, enum tree_code code)
11394 {
11395 if (!arg)
11396 return false;
11397 else if (code == POINTER_TYPE)
11398 return POINTER_TYPE_P (TREE_TYPE (arg));
11399 else if (code == INTEGER_TYPE)
11400 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
11401 return code == TREE_CODE (TREE_TYPE (arg));
11402 }
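/* Note that the match is deliberately loose for two codes: POINTER_TYPE
   accepts any pointer (POINTER_TYPE_P also covers REFERENCE_TYPE) and
   INTEGER_TYPE accepts any integral type, including enumeral and
   boolean types; every other code must match exactly.  */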
11403
11404 /* This function validates the types of a function call argument list
11405 against a specified list of tree_codes. If the last specifier is a 0,
11406 that represents an ellipsis; otherwise the last specifier must be a
11407 VOID_TYPE.
11408
11409 This is the GIMPLE version of validate_arglist. Eventually we want to
11410 completely convert builtins.c to work from GIMPLEs and the tree based
11411 validate_arglist will then be removed. */
11412
11413 bool
11414 validate_gimple_arglist (const_gimple call, ...)
11415 {
11416 enum tree_code code;
11417 bool res = false;
11418 va_list ap;
11419 const_tree arg;
11420 size_t i;
11421
11422 va_start (ap, call);
11423 i = 0;
11424
11425 do
11426 {
11427 code = (enum tree_code) va_arg (ap, int);
11428 switch (code)
11429 {
11430 case 0:
11431 /* This signifies an ellipsis; any further arguments are all OK. */
11432 res = true;
11433 goto end;
11434 case VOID_TYPE:
11435 /* This signifies an endlink; if no arguments remain, return
11436 true, otherwise return false. */
11437 res = (i == gimple_call_num_args (call));
11438 goto end;
11439 default:
11440 /* If no parameters remain or the parameter's code does not
11441 match the specified code, return false. Otherwise continue
11442 checking any remaining arguments. */
11443 arg = gimple_call_arg (call, i++);
11444 if (!validate_arg (arg, code))
11445 goto end;
11446 break;
11447 }
11448 }
11449 while (1);
11450
11451 /* We need gotos here since we can only have one va_end in a
11452 function. */
11453 end: ;
11454 va_end (ap);
11455
11456 return res;
11457 }
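/* Usage sketch (hypothetical call site): check a memset-style gimple
   call for (pointer, int, size) arguments:

       if (!validate_gimple_arglist (call, POINTER_TYPE, INTEGER_TYPE,
                                     INTEGER_TYPE, VOID_TYPE))
         return NULL_TREE;

   The final VOID_TYPE is the endlink; passing 0 instead would accept
   any number of further arguments.  */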
11458
11459 /* Default target-specific builtin expander that does nothing. */
11460
11461 rtx
11462 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
11463 rtx target ATTRIBUTE_UNUSED,
11464 rtx subtarget ATTRIBUTE_UNUSED,
11465 enum machine_mode mode ATTRIBUTE_UNUSED,
11466 int ignore ATTRIBUTE_UNUSED)
11467 {
11468 return NULL_RTX;
11469 }
11470
11471 /* Returns true if EXP represents data that would potentially reside
11472 in a readonly section. */
11473
11474 static bool
11475 readonly_data_expr (tree exp)
11476 {
11477 STRIP_NOPS (exp);
11478
11479 if (TREE_CODE (exp) != ADDR_EXPR)
11480 return false;
11481
11482 exp = get_base_address (TREE_OPERAND (exp, 0));
11483 if (!exp)
11484 return false;
11485
11486 /* Make sure we call decl_readonly_section only for trees it
11487 can handle (since it returns true for everything it doesn't
11488 understand). */
11489 if (TREE_CODE (exp) == STRING_CST
11490 || TREE_CODE (exp) == CONSTRUCTOR
11491 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
11492 return decl_readonly_section (exp, 0);
11493 else
11494 return false;
11495 }
11496
11497 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
11498 to the call, and TYPE is its return type.
11499
11500 Return NULL_TREE if no simplification was possible, otherwise return the
11501 simplified form of the call as a tree.
11502
11503 The simplified form may be a constant or other expression which
11504 computes the same value, but in a more efficient manner (including
11505 calls to other builtin functions).
11506
11507 The call may contain arguments which need to be evaluated, but
11508 which are not useful to determine the result of the call. In
11509 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11510 COMPOUND_EXPR will be an argument which must be evaluated.
11511 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11512 COMPOUND_EXPR in the chain will contain the tree for the simplified
11513 form of the builtin function call. */
11514
11515 static tree
11516 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
11517 {
11518 if (!validate_arg (s1, POINTER_TYPE)
11519 || !validate_arg (s2, POINTER_TYPE))
11520 return NULL_TREE;
11521 else
11522 {
11523 tree fn;
11524 const char *p1, *p2;
11525
11526 p2 = c_getstr (s2);
11527 if (p2 == NULL)
11528 return NULL_TREE;
11529
11530 p1 = c_getstr (s1);
11531 if (p1 != NULL)
11532 {
11533 const char *r = strstr (p1, p2);
11534 tree tem;
11535
11536 if (r == NULL)
11537 return build_int_cst (TREE_TYPE (s1), 0);
11538
11539 /* Return an offset into the constant string argument. */
11540 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11541 return fold_convert_loc (loc, type, tem);
11542 }
11543
11544 /* The argument is const char *, and the result is char *, so we need
11545 a type conversion here to avoid a warning. */
11546 if (p2[0] == '\0')
11547 return fold_convert_loc (loc, type, s1);
11548
11549 if (p2[1] != '\0')
11550 return NULL_TREE;
11551
11552 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11553 if (!fn)
11554 return NULL_TREE;
11555
11556 /* New argument list transforming strstr(s1, s2) to
11557 strchr(s1, s2[0]). */
11558 return build_call_expr_loc (loc, fn, 2, s1,
11559 build_int_cst (integer_type_node, p2[0]));
11560 }
11561 }
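/* Editorial examples of the strstr foldings above:

       strstr ("hello", "ll") ->  "hello" + 2     (constant folded)
       strstr (s, "")         ->  (char *) s
       strstr (s, "w")        ->  strchr (s, 'w')  */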
11562
11563 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11564 the call, and TYPE is its return type.
11565
11566 Return NULL_TREE if no simplification was possible, otherwise return the
11567 simplified form of the call as a tree.
11568
11569 The simplified form may be a constant or other expression which
11570 computes the same value, but in a more efficient manner (including
11571 calls to other builtin functions).
11572
11573 The call may contain arguments which need to be evaluated, but
11574 which are not useful to determine the result of the call. In
11575 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11576 COMPOUND_EXPR will be an argument which must be evaluated.
11577 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11578 COMPOUND_EXPR in the chain will contain the tree for the simplified
11579 form of the builtin function call. */
11580
11581 static tree
11582 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
11583 {
11584 if (!validate_arg (s1, POINTER_TYPE)
11585 || !validate_arg (s2, INTEGER_TYPE))
11586 return NULL_TREE;
11587 else
11588 {
11589 const char *p1;
11590
11591 if (TREE_CODE (s2) != INTEGER_CST)
11592 return NULL_TREE;
11593
11594 p1 = c_getstr (s1);
11595 if (p1 != NULL)
11596 {
11597 char c;
11598 const char *r;
11599 tree tem;
11600
11601 if (target_char_cast (s2, &c))
11602 return NULL_TREE;
11603
11604 r = strchr (p1, c);
11605
11606 if (r == NULL)
11607 return build_int_cst (TREE_TYPE (s1), 0);
11608
11609 /* Return an offset into the constant string argument. */
11610 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11611 return fold_convert_loc (loc, type, tem);
11612 }
11613 return NULL_TREE;
11614 }
11615 }
11616
11617 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11618 the call, and TYPE is its return type.
11619
11620 Return NULL_TREE if no simplification was possible, otherwise return the
11621 simplified form of the call as a tree.
11622
11623 The simplified form may be a constant or other expression which
11624 computes the same value, but in a more efficient manner (including
11625 calls to other builtin functions).
11626
11627 The call may contain arguments which need to be evaluated, but
11628 which are not useful to determine the result of the call. In
11629 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11630 COMPOUND_EXPR will be an argument which must be evaluated.
11631 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11632 COMPOUND_EXPR in the chain will contain the tree for the simplified
11633 form of the builtin function call. */
11634
11635 static tree
11636 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
11637 {
11638 if (!validate_arg (s1, POINTER_TYPE)
11639 || !validate_arg (s2, INTEGER_TYPE))
11640 return NULL_TREE;
11641 else
11642 {
11643 tree fn;
11644 const char *p1;
11645
11646 if (TREE_CODE (s2) != INTEGER_CST)
11647 return NULL_TREE;
11648
11649 p1 = c_getstr (s1);
11650 if (p1 != NULL)
11651 {
11652 char c;
11653 const char *r;
11654 tree tem;
11655
11656 if (target_char_cast (s2, &c))
11657 return NULL_TREE;
11658
11659 r = strrchr (p1, c);
11660
11661 if (r == NULL)
11662 return build_int_cst (TREE_TYPE (s1), 0);
11663
11664 /* Return an offset into the constant string argument. */
11665 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11666 return fold_convert_loc (loc, type, tem);
11667 }
11668
11669 if (! integer_zerop (s2))
11670 return NULL_TREE;
11671
11672 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11673 if (!fn)
11674 return NULL_TREE;
11675
11676 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11677 return build_call_expr_loc (loc, fn, 2, s1, s2);
11678 }
11679 }
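/* Editorial examples of the strrchr foldings above:

       strrchr ("hello", 'l') ->  "hello" + 3     (constant folded)
       strrchr (s, '\0')      ->  strchr (s, '\0')  */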
11680
11681 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11682 to the call, and TYPE is its return type.
11683
11684 Return NULL_TREE if no simplification was possible, otherwise return the
11685 simplified form of the call as a tree.
11686
11687 The simplified form may be a constant or other expression which
11688 computes the same value, but in a more efficient manner (including
11689 calls to other builtin functions).
11690
11691 The call may contain arguments which need to be evaluated, but
11692 which are not useful to determine the result of the call. In
11693 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11694 COMPOUND_EXPR will be an argument which must be evaluated.
11695 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11696 COMPOUND_EXPR in the chain will contain the tree for the simplified
11697 form of the builtin function call. */
11698
11699 static tree
11700 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
11701 {
11702 if (!validate_arg (s1, POINTER_TYPE)
11703 || !validate_arg (s2, POINTER_TYPE))
11704 return NULL_TREE;
11705 else
11706 {
11707 tree fn;
11708 const char *p1, *p2;
11709
11710 p2 = c_getstr (s2);
11711 if (p2 == NULL)
11712 return NULL_TREE;
11713
11714 p1 = c_getstr (s1);
11715 if (p1 != NULL)
11716 {
11717 const char *r = strpbrk (p1, p2);
11718 tree tem;
11719
11720 if (r == NULL)
11721 return build_int_cst (TREE_TYPE (s1), 0);
11722
11723 /* Return an offset into the constant string argument. */
11724 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11725 return fold_convert_loc (loc, type, tem);
11726 }
11727
11728 if (p2[0] == '\0')
11729 /* strpbrk(x, "") == NULL.
11730 Evaluate and ignore s1 in case it had side-effects. */
11731 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11732
11733 if (p2[1] != '\0')
11734 return NULL_TREE; /* Really call strpbrk. */
11735
11736 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11737 if (!fn)
11738 return NULL_TREE;
11739
11740 /* New argument list transforming strpbrk(s1, s2) to
11741 strchr(s1, s2[0]). */
11742 return build_call_expr_loc (loc, fn, 2, s1,
11743 build_int_cst (integer_type_node, p2[0]));
11744 }
11745 }
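/* Editorial examples of the strpbrk foldings above:

       strpbrk ("hello", "lo") ->  "hello" + 2    (constant folded)
       strpbrk (s, "")         ->  NULL, with S still evaluated
       strpbrk (s, "w")        ->  strchr (s, 'w')  */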
11746
11747 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11748 to the call.
11749
11750 Return NULL_TREE if no simplification was possible, otherwise return the
11751 simplified form of the call as a tree.
11752
11753 The simplified form may be a constant or other expression which
11754 computes the same value, but in a more efficient manner (including
11755 calls to other builtin functions).
11756
11757 The call may contain arguments which need to be evaluated, but
11758 which are not useful to determine the result of the call. In
11759 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11760 COMPOUND_EXPR will be an argument which must be evaluated.
11761 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11762 COMPOUND_EXPR in the chain will contain the tree for the simplified
11763 form of the builtin function call. */
11764
11765 tree
11766 fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src,
11767 tree len)
11768 {
11769 if (!validate_arg (dst, POINTER_TYPE)
11770 || !validate_arg (src, POINTER_TYPE))
11771 return NULL_TREE;
11772 else
11773 {
11774 const char *p = c_getstr (src);
11775
11776 /* If the string length is zero, return the dst parameter. */
11777 if (p && *p == '\0')
11778 return dst;
11779
11780 if (optimize_insn_for_speed_p ())
11781 {
11782 /* See if we can store by pieces into (dst + strlen(dst)). */
11783 tree newdst, call;
11784 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11785 tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
11786
11787 if (!strlen_fn || !memcpy_fn)
11788 return NULL_TREE;
11789
11790 /* If the length of the source string isn't computable, don't
11791 split strcat into strlen and memcpy. */
11792 if (! len)
11793 len = c_strlen (src, 1);
11794 if (! len || TREE_SIDE_EFFECTS (len))
11795 return NULL_TREE;
11796
11797 /* Stabilize the argument list. */
11798 dst = builtin_save_expr (dst);
11799
11800 /* Create strlen (dst). */
11801 newdst = build_call_expr_loc (loc, strlen_fn, 1, dst);
11802 /* Create (dst p+ strlen (dst)). */
11803
11804 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
11805 newdst = builtin_save_expr (newdst);
11806
11807 len = fold_convert_loc (loc, size_type_node, len);
11808 len = size_binop_loc (loc, PLUS_EXPR, len,
11809 build_int_cst (size_type_node, 1));
11810
11811 call = build_call_expr_loc (loc, memcpy_fn, 3, newdst, src, len);
11812 return build2 (COMPOUND_EXPR, TREE_TYPE (dst), call, dst);
11813 }
11814 return NULL_TREE;
11815 }
11816 }
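/* Editorial sketch of the split above: with strlen (src) known to be
   the constant N, and when optimizing for speed,

       strcat (dst, src);

   is rewritten, in effect, as

       memcpy (dst + strlen (dst), src, N + 1), dst;

   so that the NUL terminator is copied by the memcpy and the whole
   expression still yields DST.  */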
11817
11818 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11819 arguments to the call.
11820
11821 Return NULL_TREE if no simplification was possible, otherwise return the
11822 simplified form of the call as a tree.
11823
11824 The simplified form may be a constant or other expression which
11825 computes the same value, but in a more efficient manner (including
11826 calls to other builtin functions).
11827
11828 The call may contain arguments which need to be evaluated, but
11829 which are not useful to determine the result of the call. In
11830 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11831 COMPOUND_EXPR will be an argument which must be evaluated.
11832 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11833 COMPOUND_EXPR in the chain will contain the tree for the simplified
11834 form of the builtin function call. */
11835
11836 static tree
11837 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
11838 {
11839 if (!validate_arg (dst, POINTER_TYPE)
11840 || !validate_arg (src, POINTER_TYPE)
11841 || !validate_arg (len, INTEGER_TYPE))
11842 return NULL_TREE;
11843 else
11844 {
11845 const char *p = c_getstr (src);
11846
11847 /* If the requested length is zero, or the src parameter string
11848 length is zero, return the dst parameter. */
11849 if (integer_zerop (len) || (p && *p == '\0'))
11850 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11851
11852 /* If the requested len is greater than or equal to the string
11853 length, call strcat. */
11854 if (TREE_CODE (len) == INTEGER_CST && p
11855 && compare_tree_int (len, strlen (p)) >= 0)
11856 {
11857 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
11858
11859 /* If the replacement _DECL isn't initialized, don't do the
11860 transformation. */
11861 if (!fn)
11862 return NULL_TREE;
11863
11864 return build_call_expr_loc (loc, fn, 2, dst, src);
11865 }
11866 return NULL_TREE;
11867 }
11868 }
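/* Editorial examples of the strncat foldings above:

       strncat (dst, src, 0)  ->  dst, with SRC and the length still
                                  evaluated for side effects
       strncat (dst, "ab", 5) ->  strcat (dst, "ab"), since 5 >= 2  */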
11869
11870 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11871 to the call.
11872
11873 Return NULL_TREE if no simplification was possible, otherwise return the
11874 simplified form of the call as a tree.
11875
11876 The simplified form may be a constant or other expression which
11877 computes the same value, but in a more efficient manner (including
11878 calls to other builtin functions).
11879
11880 The call may contain arguments which need to be evaluated, but
11881 which are not useful to determine the result of the call. In
11882 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11883 COMPOUND_EXPR will be an argument which must be evaluated.
11884 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11885 COMPOUND_EXPR in the chain will contain the tree for the simplified
11886 form of the builtin function call. */
11887
11888 static tree
11889 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11890 {
11891 if (!validate_arg (s1, POINTER_TYPE)
11892 || !validate_arg (s2, POINTER_TYPE))
11893 return NULL_TREE;
11894 else
11895 {
11896 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11897
11898 /* If both arguments are constants, evaluate at compile-time. */
11899 if (p1 && p2)
11900 {
11901 const size_t r = strspn (p1, p2);
11902 return build_int_cst (size_type_node, r);
11903 }
11904
11905 /* If either argument is "", the result is zero. */
11906 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11907 /* Evaluate and ignore both arguments in case either one has
11908 side-effects. */
11909 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11910 s1, s2);
11911 return NULL_TREE;
11912 }
11913 }
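/* Editorial examples of the strspn foldings above:

       strspn ("abcba", "ab") ->  2 at compile time
       strspn (s, "")         ->  0, with both operands still evaluated  */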
11914
11915 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11916 to the call.
11917
11918 Return NULL_TREE if no simplification was possible, otherwise return the
11919 simplified form of the call as a tree.
11920
11921 The simplified form may be a constant or other expression which
11922 computes the same value, but in a more efficient manner (including
11923 calls to other builtin functions).
11924
11925 The call may contain arguments which need to be evaluated, but
11926 which are not useful to determine the result of the call. In
11927 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11928 COMPOUND_EXPR will be an argument which must be evaluated.
11929 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11930 COMPOUND_EXPR in the chain will contain the tree for the simplified
11931 form of the builtin function call. */
11932
11933 static tree
11934 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11935 {
11936 if (!validate_arg (s1, POINTER_TYPE)
11937 || !validate_arg (s2, POINTER_TYPE))
11938 return NULL_TREE;
11939 else
11940 {
11941 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11942
11943 /* If both arguments are constants, evaluate at compile-time. */
11944 if (p1 && p2)
11945 {
11946 const size_t r = strcspn (p1, p2);
11947 return build_int_cst (size_type_node, r);
11948 }
11949
11950 /* If the first argument is "", the result is zero. */
11951 if (p1 && *p1 == '\0')
11952 {
11953 /* Evaluate and ignore argument s2 in case it has
11954 side-effects. */
11955 return omit_one_operand_loc (loc, size_type_node,
11956 size_zero_node, s2);
11957 }
11958
11959 /* If the second argument is "", return __builtin_strlen(s1). */
11960 if (p2 && *p2 == '\0')
11961 {
11962 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11963
11964 /* If the replacement _DECL isn't initialized, don't do the
11965 transformation. */
11966 if (!fn)
11967 return NULL_TREE;
11968
11969 return build_call_expr_loc (loc, fn, 1, s1);
11970 }
11971 return NULL_TREE;
11972 }
11973 }
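/* Editorial examples of the strcspn foldings above:

       strcspn ("abcba", "c") ->  2 at compile time
       strcspn ("", s)        ->  0, with S still evaluated
       strcspn (s, "")        ->  strlen (s)  */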
11974
11975 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11976 to the call. IGNORE is true if the value returned
11977 by the builtin will be ignored. UNLOCKED is true if this is
11978 actually a call to fputs_unlocked. If LEN is non-NULL, it represents
11979 the known length of the string. Return NULL_TREE if no simplification
11980 was possible. */
11981
11982 tree
11983 fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
11984 bool ignore, bool unlocked, tree len)
11985 {
11986 /* If we're using an unlocked function, assume the other unlocked
11987 functions exist explicitly. */
11988 tree const fn_fputc = (unlocked
11989 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
11990 : builtin_decl_implicit (BUILT_IN_FPUTC));
11991 tree const fn_fwrite = (unlocked
11992 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
11993 : builtin_decl_implicit (BUILT_IN_FWRITE));
11994
11995 /* If the return value is used, don't do the transformation. */
11996 if (!ignore)
11997 return NULL_TREE;
11998
11999 /* Verify the arguments in the original call. */
12000 if (!validate_arg (arg0, POINTER_TYPE)
12001 || !validate_arg (arg1, POINTER_TYPE))
12002 return NULL_TREE;
12003
12004 if (! len)
12005 len = c_strlen (arg0, 0);
12006
12007 /* Get the length of the string passed to fputs. If the length
12008 can't be determined, punt. */
12009 if (!len
12010 || TREE_CODE (len) != INTEGER_CST)
12011 return NULL_TREE;
12012
12013 switch (compare_tree_int (len, 1))
12014 {
12015 case -1: /* length is 0, delete the call entirely. */
12016 return omit_one_operand_loc (loc, integer_type_node,
12017 integer_zero_node, arg1);
12018
12019 case 0: /* length is 1, call fputc. */
12020 {
12021 const char *p = c_getstr (arg0);
12022
12023 if (p != NULL)
12024 {
12025 if (fn_fputc)
12026 return build_call_expr_loc (loc, fn_fputc, 2,
12027 build_int_cst
12028 (integer_type_node, p[0]), arg1);
12029 else
12030 return NULL_TREE;
12031 }
12032 }
12033 /* FALLTHROUGH */
12034 case 1: /* length is greater than 1, call fwrite. */
12035 {
12036 /* If optimizing for size, keep fputs. */
12037 if (optimize_function_for_size_p (cfun))
12038 return NULL_TREE;
12039 /* New argument list transforming fputs(string, stream) to
12040 fwrite(string, 1, len, stream). */
12041 if (fn_fwrite)
12042 return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
12043 size_one_node, len, arg1);
12044 else
12045 return NULL_TREE;
12046 }
12047 default:
12048 gcc_unreachable ();
12049 }
12050 return NULL_TREE;
12051 }
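/* Editorial examples of the fputs foldings above:

       fputs ("", f)    ->  0, with F still evaluated
       fputs ("x", f)   ->  fputc ('x', f)
       fputs ("abc", f) ->  fwrite ("abc", 1, 3, f)
                            (only when not optimizing for size)  */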
12052
12053 /* Fold the next_arg or va_start call EXP. Returns true if an error
12054 was produced, false otherwise. This is done so that we don't output
12055 the error or warning more than once. */
12056
12057 bool
12058 fold_builtin_next_arg (tree exp, bool va_start_p)
12059 {
12060 tree fntype = TREE_TYPE (current_function_decl);
12061 int nargs = call_expr_nargs (exp);
12062 tree arg;
12063 /* There is a good chance the current input_location points inside the
12064 definition of the va_start macro (perhaps on the token for
12065 builtin) in a system header, so warnings will not be emitted.
12066 Use the location in real source code. */
12067 source_location current_location =
12068 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
12069 NULL);
12070
12071 if (!stdarg_p (fntype))
12072 {
12073 error ("%<va_start%> used in function with fixed args");
12074 return true;
12075 }
12076
12077 if (va_start_p)
12078 {
12079 if (nargs != 2)
12080 {
12081 error ("wrong number of arguments to function %<va_start%>");
12082 return true;
12083 }
12084 arg = CALL_EXPR_ARG (exp, 1);
12085 }
12086 /* We use __builtin_va_start (ap, 0) or __builtin_next_arg (0)
12087 when we have checked the arguments and, if needed, issued a warning. */
12088 else
12089 {
12090 if (nargs == 0)
12091 {
12092 /* Evidently an out of date version of <stdarg.h>; can't validate
12093 va_start's second argument, but can still work as intended. */
12094 warning_at (current_location,
12095 OPT_Wvarargs,
12096 "%<__builtin_next_arg%> called without an argument");
12097 return true;
12098 }
12099 else if (nargs > 1)
12100 {
12101 error ("wrong number of arguments to function %<__builtin_next_arg%>");
12102 return true;
12103 }
12104 arg = CALL_EXPR_ARG (exp, 0);
12105 }
12106
12107 if (TREE_CODE (arg) == SSA_NAME)
12108 arg = SSA_NAME_VAR (arg);
12109
12110 /* We destructively modify the call to be __builtin_va_start (ap, 0)
12111 or __builtin_next_arg (0) the first time we see it, after checking
12112 the arguments and if needed issuing a warning. */
12113 if (!integer_zerop (arg))
12114 {
12115 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
12116
12117 /* Strip off all nops for the sake of the comparison. This
12118 is not quite the same as STRIP_NOPS. It does more.
12119 We must also strip off INDIRECT_EXPR for C++ reference
12120 parameters. */
12121 while (CONVERT_EXPR_P (arg)
12122 || TREE_CODE (arg) == INDIRECT_REF)
12123 arg = TREE_OPERAND (arg, 0);
12124 if (arg != last_parm)
12125 {
12126 /* FIXME: Sometimes the tree optimizers hand us an argument
12127 that is not the last named argument even though the user
12128 wrote the last one. We just warn here; the argument is
12129 still replaced below, so wrong code can result because
12130 of it. */
12131 warning_at (current_location,
12132 OPT_Wvarargs,
12133 "second parameter of %<va_start%> not last named argument");
12134 }
12135
12136 /* Undefined by C99 7.15.1.4p4 (va_start):
12137 "If the parameter parmN is declared with the register storage
12138 class, with a function or array type, or with a type that is
12139 not compatible with the type that results after application of
12140 the default argument promotions, the behavior is undefined."
12141 */
12142 else if (DECL_REGISTER (arg))
12143 {
12144 warning_at (current_location,
12145 OPT_Wvarargs,
12146 "undefined behaviour when second parameter of "
12147 "%<va_start%> is declared with %<register%> storage");
12148 }
12149
12150 /* We want to verify the second parameter just once before the tree
12151 optimizers are run and then avoid keeping it in the tree,
12152 as otherwise we could warn even for correct code like:
12153 void foo (int i, ...)
12154 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
12155 if (va_start_p)
12156 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
12157 else
12158 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
12159 }
12160 return false;
12161 }
12162
12163
12164 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
12165 ORIG may be null if this is a 2-argument call. We don't attempt to
12166 simplify calls with more than 3 arguments.
12167
12168 Return NULL_TREE if no simplification was possible, otherwise return the
12169 simplified form of the call as a tree. If IGNORED is true, it means that
12170 the caller does not use the returned value of the function. */
12171
12172 static tree
12173 fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
12174 tree orig, int ignored)
12175 {
12176 tree call, retval;
12177 const char *fmt_str = NULL;
12178
12179 /* Verify the required arguments in the original call. We deal with two
12180 types of sprintf() calls: 'sprintf (str, fmt)' and
12181 'sprintf (dest, "%s", orig)'. */
12182 if (!validate_arg (dest, POINTER_TYPE)
12183 || !validate_arg (fmt, POINTER_TYPE))
12184 return NULL_TREE;
12185 if (orig && !validate_arg (orig, POINTER_TYPE))
12186 return NULL_TREE;
12187
12188 /* Check whether the format is a literal string constant. */
12189 fmt_str = c_getstr (fmt);
12190 if (fmt_str == NULL)
12191 return NULL_TREE;
12192
12193 call = NULL_TREE;
12194 retval = NULL_TREE;
12195
12196 if (!init_target_chars ())
12197 return NULL_TREE;
12198
12199 /* If the format doesn't contain % args or %%, use strcpy. */
12200 if (strchr (fmt_str, target_percent) == NULL)
12201 {
12202 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12203
12204 if (!fn)
12205 return NULL_TREE;
12206
12207 /* Don't optimize sprintf (buf, "abc", ptr++). */
12208 if (orig)
12209 return NULL_TREE;
12210
12211 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
12212 'format' is known to contain no % formats. */
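/* E.g. sprintf (str, "abc") becomes strcpy (str, "abc"), and when
the result is used the constant 3 (== strlen ("abc")) is
substituted. */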
12213 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
12214 if (!ignored)
12215 retval = build_int_cst (integer_type_node, strlen (fmt_str));
12216 }
12217
12218 /* If the format is "%s", use strcpy if the result isn't used. */
12219 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
12220 {
12221 tree fn;
12222 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12223
12224 if (!fn)
12225 return NULL_TREE;
12226
12227 /* Don't crash on sprintf (str1, "%s"). */
12228 if (!orig)
12229 return NULL_TREE;
12230
12231 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
12232 if (!ignored)
12233 {
12234 retval = c_strlen (orig, 1);
12235 if (!retval || TREE_CODE (retval) != INTEGER_CST)
12236 return NULL_TREE;
12237 }
12238 call = build_call_expr_loc (loc, fn, 2, dest, orig);
12239 }
12240
12241 if (call && retval)
12242 {
12243 retval = fold_convert_loc
12244 (loc, TREE_TYPE (TREE_TYPE (builtin_decl_implicit (BUILT_IN_SPRINTF))),
12245 retval);
12246 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
12247 }
12248 else
12249 return call;
12250 }
12251
12252 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
12253 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
12254 attempt to simplify calls with more than 4 arguments.
12255
12256 Return NULL_TREE if no simplification was possible, otherwise return the
12257 simplified form of the call as a tree. If IGNORED is true, it means that
12258 the caller does not use the returned value of the function. */
12259
12260 static tree
12261 fold_builtin_snprintf (location_t loc, tree dest, tree destsize, tree fmt,
12262 tree orig, int ignored)
12263 {
12264 tree call, retval;
12265 const char *fmt_str = NULL;
12266 unsigned HOST_WIDE_INT destlen;
12267
12268 /* Verify the required arguments in the original call. We deal with two
12269 types of snprintf() calls: 'snprintf (str, cst, fmt)' and
12270 'snprintf (dest, cst, "%s", orig)'. */
12271 if (!validate_arg (dest, POINTER_TYPE)
12272 || !validate_arg (destsize, INTEGER_TYPE)
12273 || !validate_arg (fmt, POINTER_TYPE))
12274 return NULL_TREE;
12275 if (orig && !validate_arg (orig, POINTER_TYPE))
12276 return NULL_TREE;
12277
12278 if (!tree_fits_uhwi_p (destsize))
12279 return NULL_TREE;
12280
12281 /* Check whether the format is a literal string constant. */
12282 fmt_str = c_getstr (fmt);
12283 if (fmt_str == NULL)
12284 return NULL_TREE;
12285
12286 call = NULL_TREE;
12287 retval = NULL_TREE;
12288
12289 if (!init_target_chars ())
12290 return NULL_TREE;
12291
12292 destlen = tree_to_uhwi (destsize);
12293
12294 /* If the format doesn't contain % args or %%, use strcpy. */
12295 if (strchr (fmt_str, target_percent) == NULL)
12296 {
12297 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12298 size_t len = strlen (fmt_str);
12299
12300 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
12301 if (orig)
12302 return NULL_TREE;
12303
12304 /* We could expand this as
12305 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
12306 or to
12307 memcpy (str, fmt_with_nul_at_cstm1, cst);
12308 but in the former case that might increase code size
12309 and in the latter case grow .rodata section too much.
12310 So punt for now. */
12311 if (len >= destlen)
12312 return NULL_TREE;
12313
12314 if (!fn)
12315 return NULL_TREE;
12316
12317 /* Convert snprintf (str, cst, fmt) into strcpy (str, fmt) when
12318 'format' is known to contain no % formats and
12319 strlen (fmt) < cst. */
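/* E.g. snprintf (str, 8, "abc") becomes strcpy (str, "abc"), with
the value 3 substituted when the result is used. */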
12320 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
12321
12322 if (!ignored)
12323 retval = build_int_cst (integer_type_node, strlen (fmt_str));
12324 }
12325
12326 /* If the format is "%s", use strcpy if the result isn't used. */
12327 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
12328 {
12329 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12330 unsigned HOST_WIDE_INT origlen;
12331
12332 /* Don't crash on snprintf (str1, cst, "%s"). */
12333 if (!orig)
12334 return NULL_TREE;
12335
12336 retval = c_strlen (orig, 1);
12337 if (!retval || !tree_fits_uhwi_p (retval))
12338 return NULL_TREE;
12339
12340 origlen = tree_to_uhwi (retval);
12341 /* We could expand this as
12342 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
12343 or to
12344 memcpy (str1, str2_with_nul_at_cstm1, cst);
12345 but in the former case that might increase code size
12346 and in the latter case grow .rodata section too much.
12347 So punt for now. */
12348 if (origlen >= destlen)
12349 return NULL_TREE;
12350
12351 /* Convert snprintf (str1, cst, "%s", str2) into
12352 strcpy (str1, str2) if strlen (str2) < cst. */
12353 if (!fn)
12354 return NULL_TREE;
12355
12356 call = build_call_expr_loc (loc, fn, 2, dest, orig);
12357
12358 if (ignored)
12359 retval = NULL_TREE;
12360 }
12361
12362 if (call && retval)
12363 {
12364 tree fn = builtin_decl_explicit (BUILT_IN_SNPRINTF);
12365 retval = fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fn)), retval);
12366 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
12367 }
12368 else
12369 return call;
12370 }
12371
12372 /* Expand a call EXP to __builtin_object_size. */
12373
12374 rtx
12375 expand_builtin_object_size (tree exp)
12376 {
12377 tree ost;
12378 int object_size_type;
12379 tree fndecl = get_callee_fndecl (exp);
12380
12381 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
12382 {
12383 error ("%Kfirst argument of %D must be a pointer, second integer constant",
12384 exp, fndecl);
12385 expand_builtin_trap ();
12386 return const0_rtx;
12387 }
12388
12389 ost = CALL_EXPR_ARG (exp, 1);
12390 STRIP_NOPS (ost);
12391
12392 if (TREE_CODE (ost) != INTEGER_CST
12393 || tree_int_cst_sgn (ost) < 0
12394 || compare_tree_int (ost, 3) > 0)
12395 {
12396 error ("%Klast argument of %D is not integer constant between 0 and 3",
12397 exp, fndecl);
12398 expand_builtin_trap ();
12399 return const0_rtx;
12400 }
12401
12402 object_size_type = tree_to_shwi (ost);
12403
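/* If folding could not compute a constant size earlier, fall back to
the documented defaults: (size_t) -1 for types 0 and 1, and
(size_t) 0 for types 2 and 3; e.g. __builtin_object_size (p, 0)
expands to -1 here. */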
12404 return object_size_type < 2 ? constm1_rtx : const0_rtx;
12405 }
12406
12407 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12408 FCODE is the BUILT_IN_* to use.
12409 Return NULL_RTX if we failed; the caller should emit a normal call,
12410 otherwise try to get the result in TARGET, if convenient (and in
12411 mode MODE if that's convenient). */
12412
12413 static rtx
12414 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
12415 enum built_in_function fcode)
12416 {
12417 tree dest, src, len, size;
12418
12419 if (!validate_arglist (exp,
12420 POINTER_TYPE,
12421 fcode == BUILT_IN_MEMSET_CHK
12422 ? INTEGER_TYPE : POINTER_TYPE,
12423 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
12424 return NULL_RTX;
12425
12426 dest = CALL_EXPR_ARG (exp, 0);
12427 src = CALL_EXPR_ARG (exp, 1);
12428 len = CALL_EXPR_ARG (exp, 2);
12429 size = CALL_EXPR_ARG (exp, 3);
12430
12431 if (! tree_fits_uhwi_p (size))
12432 return NULL_RTX;
12433
12434 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
12435 {
12436 tree fn;
12437
12438 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
12439 {
12440 warning_at (tree_nonartificial_location (exp),
12441 0, "%Kcall to %D will always overflow destination buffer",
12442 exp, get_callee_fndecl (exp));
12443 return NULL_RTX;
12444 }
12445
12446 fn = NULL_TREE;
12447 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12448 mem{cpy,pcpy,move,set} is available. */
12449 switch (fcode)
12450 {
12451 case BUILT_IN_MEMCPY_CHK:
12452 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
12453 break;
12454 case BUILT_IN_MEMPCPY_CHK:
12455 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
12456 break;
12457 case BUILT_IN_MEMMOVE_CHK:
12458 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
12459 break;
12460 case BUILT_IN_MEMSET_CHK:
12461 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
12462 break;
12463 default:
12464 break;
12465 }
12466
12467 if (! fn)
12468 return NULL_RTX;
12469
12470 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
12471 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
12472 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12473 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12474 }
12475 else if (fcode == BUILT_IN_MEMSET_CHK)
12476 return NULL_RTX;
12477 else
12478 {
12479 unsigned int dest_align = get_pointer_alignment (dest);
12480
12481 /* If DEST is not a pointer type, call the normal function. */
12482 if (dest_align == 0)
12483 return NULL_RTX;
12484
12485 /* If SRC and DEST are the same (and not volatile), do nothing. */
12486 if (operand_equal_p (src, dest, 0))
12487 {
12488 tree expr;
12489
12490 if (fcode != BUILT_IN_MEMPCPY_CHK)
12491 {
12492 /* Evaluate and ignore LEN in case it has side-effects. */
12493 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
12494 return expand_expr (dest, target, mode, EXPAND_NORMAL);
12495 }
12496
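/* __mempcpy_chk returns DEST + LEN rather than DEST, so compute
that even though the copy itself is dropped. */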
12497 expr = fold_build_pointer_plus (dest, len);
12498 return expand_expr (expr, target, mode, EXPAND_NORMAL);
12499 }
12500
12501 /* __memmove_chk special case. */
12502 if (fcode == BUILT_IN_MEMMOVE_CHK)
12503 {
12504 unsigned int src_align = get_pointer_alignment (src);
12505
12506 if (src_align == 0)
12507 return NULL_RTX;
12508
12509 /* If src is categorized for a readonly section we can use
12510 normal __memcpy_chk. */
12511 if (readonly_data_expr (src))
12512 {
12513 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12514 if (!fn)
12515 return NULL_RTX;
12516 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
12517 dest, src, len, size);
12518 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
12519 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12520 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12521 }
12522 }
12523 return NULL_RTX;
12524 }
12525 }
12526
12527 /* Emit warning if a buffer overflow is detected at compile time. */
12528
12529 static void
12530 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
12531 {
12532 int is_strlen = 0;
12533 tree len, size;
12534 location_t loc = tree_nonartificial_location (exp);
12535
12536 switch (fcode)
12537 {
12538 case BUILT_IN_STRCPY_CHK:
12539 case BUILT_IN_STPCPY_CHK:
12540 /* For __strcat_chk the warning will be emitted only if overflowing
12541 by at least strlen (dest) + 1 bytes. */
12542 case BUILT_IN_STRCAT_CHK:
12543 len = CALL_EXPR_ARG (exp, 1);
12544 size = CALL_EXPR_ARG (exp, 2);
12545 is_strlen = 1;
12546 break;
12547 case BUILT_IN_STRNCAT_CHK:
12548 case BUILT_IN_STRNCPY_CHK:
12549 case BUILT_IN_STPNCPY_CHK:
12550 len = CALL_EXPR_ARG (exp, 2);
12551 size = CALL_EXPR_ARG (exp, 3);
12552 break;
12553 case BUILT_IN_SNPRINTF_CHK:
12554 case BUILT_IN_VSNPRINTF_CHK:
12555 len = CALL_EXPR_ARG (exp, 1);
12556 size = CALL_EXPR_ARG (exp, 3);
12557 break;
12558 default:
12559 gcc_unreachable ();
12560 }
12561
12562 if (!len || !size)
12563 return;
12564
12565 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
12566 return;
12567
12568 if (is_strlen)
12569 {
12570 len = c_strlen (len, 1);
12571 if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
12572 return;
12573 }
12574 else if (fcode == BUILT_IN_STRNCAT_CHK)
12575 {
12576 tree src = CALL_EXPR_ARG (exp, 1);
12577 if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
12578 return;
12579 src = c_strlen (src, 1);
12580 if (! src || ! tree_fits_uhwi_p (src))
12581 {
12582 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
12583 exp, get_callee_fndecl (exp));
12584 return;
12585 }
12586 else if (tree_int_cst_lt (src, size))
12587 return;
12588 }
12589 else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
12590 return;
12591
12592 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
12593 exp, get_callee_fndecl (exp));
12594 }
12595
12596 /* Emit warning if a buffer overflow is detected at compile time
12597 in __sprintf_chk/__vsprintf_chk calls. */
12598
12599 static void
12600 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
12601 {
12602 tree size, len, fmt;
12603 const char *fmt_str;
12604 int nargs = call_expr_nargs (exp);
12605
12606 /* Verify the required arguments in the original call. */
12607
12608 if (nargs < 4)
12609 return;
12610 size = CALL_EXPR_ARG (exp, 2);
12611 fmt = CALL_EXPR_ARG (exp, 3);
12612
12613 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
12614 return;
12615
12616 /* Check whether the format is a literal string constant. */
12617 fmt_str = c_getstr (fmt);
12618 if (fmt_str == NULL)
12619 return;
12620
12621 if (!init_target_chars ())
12622 return;
12623
12624 /* If the format doesn't contain % args or %%, we know its size. */
12625 if (strchr (fmt_str, target_percent) == 0)
12626 len = build_int_cstu (size_type_node, strlen (fmt_str));
12627 /* If the format is "%s" and the first ... argument is a string
12628 literal, we know its length too. */
12629 else if (fcode == BUILT_IN_SPRINTF_CHK
12630 && strcmp (fmt_str, target_percent_s) == 0)
12631 {
12632 tree arg;
12633
12634 if (nargs < 5)
12635 return;
12636 arg = CALL_EXPR_ARG (exp, 4);
12637 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
12638 return;
12639
12640 len = c_strlen (arg, 1);
12641 if (!len || ! tree_fits_uhwi_p (len))
12642 return;
12643 }
12644 else
12645 return;
12646
12647 if (! tree_int_cst_lt (len, size))
12648 warning_at (tree_nonartificial_location (exp),
12649 0, "%Kcall to %D will always overflow destination buffer",
12650 exp, get_callee_fndecl (exp));
12651 }
12652
12653 /* Emit warning if a free is called with address of a variable. */
12654
12655 static void
12656 maybe_emit_free_warning (tree exp)
12657 {
12658 tree arg = CALL_EXPR_ARG (exp, 0);
12659
12660 STRIP_NOPS (arg);
12661 if (TREE_CODE (arg) != ADDR_EXPR)
12662 return;
12663
12664 arg = get_base_address (TREE_OPERAND (arg, 0));
12665 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
12666 return;
12667
12668 if (SSA_VAR_P (arg))
12669 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
12670 "%Kattempt to free a non-heap object %qD", exp, arg);
12671 else
12672 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
12673 "%Kattempt to free a non-heap object", exp);
12674 }
12675
12676 /* Fold a call to __builtin_object_size with arguments PTR and OST,
12677 if possible. */
12678
12679 tree
12680 fold_builtin_object_size (tree ptr, tree ost)
12681 {
12682 unsigned HOST_WIDE_INT bytes;
12683 int object_size_type;
12684
12685 if (!validate_arg (ptr, POINTER_TYPE)
12686 || !validate_arg (ost, INTEGER_TYPE))
12687 return NULL_TREE;
12688
12689 STRIP_NOPS (ost);
12690
12691 if (TREE_CODE (ost) != INTEGER_CST
12692 || tree_int_cst_sgn (ost) < 0
12693 || compare_tree_int (ost, 3) > 0)
12694 return NULL_TREE;
12695
12696 object_size_type = tree_to_shwi (ost);
12697
12698 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
12699 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
12700 and (size_t) 0 for types 2 and 3. */
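/* E.g. __builtin_object_size (p++, 0) folds to (size_t) -1 without
evaluating the side effect in its argument. */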
12701 if (TREE_SIDE_EFFECTS (ptr))
12702 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
12703
12704 if (TREE_CODE (ptr) == ADDR_EXPR)
12705 {
12706 bytes = compute_builtin_object_size (ptr, object_size_type);
12707 if (double_int_fits_to_tree_p (size_type_node,
12708 double_int::from_uhwi (bytes)))
12709 return build_int_cstu (size_type_node, bytes);
12710 }
12711 else if (TREE_CODE (ptr) == SSA_NAME)
12712 {
12713 /* If the object size is not known yet, delay folding until
12714 later. Maybe subsequent passes will help determine
12715 it. */
12716 bytes = compute_builtin_object_size (ptr, object_size_type);
12717 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
12718 && double_int_fits_to_tree_p (size_type_node,
12719 double_int::from_uhwi (bytes)))
12720 return build_int_cstu (size_type_node, bytes);
12721 }
12722
12723 return NULL_TREE;
12724 }
12725
12726 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12727 DEST, SRC, LEN, and SIZE are the arguments to the call.
12728 IGNORE is true if the return value can be ignored. FCODE is the BUILT_IN_*
12729 code of the builtin. If MAXLEN is not NULL, it is the maximum length
12730 passed as the third argument. */
12731
12732 tree
12733 fold_builtin_memory_chk (location_t loc, tree fndecl,
12734 tree dest, tree src, tree len, tree size,
12735 tree maxlen, bool ignore,
12736 enum built_in_function fcode)
12737 {
12738 tree fn;
12739
12740 if (!validate_arg (dest, POINTER_TYPE)
12741 || !validate_arg (src,
12742 (fcode == BUILT_IN_MEMSET_CHK
12743 ? INTEGER_TYPE : POINTER_TYPE))
12744 || !validate_arg (len, INTEGER_TYPE)
12745 || !validate_arg (size, INTEGER_TYPE))
12746 return NULL_TREE;
12747
12748 /* If SRC and DEST are the same (and not volatile), return DEST
12749 (resp. DEST+LEN for __mempcpy_chk). */
12750 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
12751 {
12752 if (fcode != BUILT_IN_MEMPCPY_CHK)
12753 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12754 dest, len);
12755 else
12756 {
12757 tree temp = fold_build_pointer_plus_loc (loc, dest, len);
12758 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
12759 }
12760 }
12761
12762 if (! tree_fits_uhwi_p (size))
12763 return NULL_TREE;
12764
12765 if (! integer_all_onesp (size))
12766 {
12767 if (! tree_fits_uhwi_p (len))
12768 {
12769 /* If LEN is not constant, try MAXLEN too.
12770 For MAXLEN only allow optimizing into non-_ocs function
12771 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12772 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
12773 {
12774 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
12775 {
12776 /* (void) __mempcpy_chk () can be optimized into
12777 (void) __memcpy_chk (). */
12778 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12779 if (!fn)
12780 return NULL_TREE;
12781
12782 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
12783 }
12784 return NULL_TREE;
12785 }
12786 }
12787 else
12788 maxlen = len;
12789
12790 if (tree_int_cst_lt (size, maxlen))
12791 return NULL_TREE;
12792 }
12793
12794 fn = NULL_TREE;
12795 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12796 mem{cpy,pcpy,move,set} is available. */
12797 switch (fcode)
12798 {
12799 case BUILT_IN_MEMCPY_CHK:
12800 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
12801 break;
12802 case BUILT_IN_MEMPCPY_CHK:
12803 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
12804 break;
12805 case BUILT_IN_MEMMOVE_CHK:
12806 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
12807 break;
12808 case BUILT_IN_MEMSET_CHK:
12809 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
12810 break;
12811 default:
12812 break;
12813 }
12814
12815 if (!fn)
12816 return NULL_TREE;
12817
12818 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12819 }
12820
12821 /* Fold a call to the __st[rp]cpy_chk builtin.
12822 DEST, SRC, and SIZE are the arguments to the call.
12823 IGNORE is true if the return value can be ignored. FCODE is the BUILT_IN_*
12824 code of the builtin. If MAXLEN is not NULL, it is the maximum length of
12825 the string passed as the second argument. */
12826
12827 tree
12828 fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
12829 tree src, tree size,
12830 tree maxlen, bool ignore,
12831 enum built_in_function fcode)
12832 {
12833 tree len, fn;
12834
12835 if (!validate_arg (dest, POINTER_TYPE)
12836 || !validate_arg (src, POINTER_TYPE)
12837 || !validate_arg (size, INTEGER_TYPE))
12838 return NULL_TREE;
12839
12840 /* If SRC and DEST are the same (and not volatile), return DEST. */
12841 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12842 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
12843
12844 if (! tree_fits_uhwi_p (size))
12845 return NULL_TREE;
12846
12847 if (! integer_all_onesp (size))
12848 {
12849 len = c_strlen (src, 1);
12850 if (! len || ! tree_fits_uhwi_p (len))
12851 {
12852 /* If LEN is not constant, try MAXLEN too.
12853 For MAXLEN only allow optimizing into non-_ocs function
12854 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12855 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
12856 {
12857 if (fcode == BUILT_IN_STPCPY_CHK)
12858 {
12859 if (! ignore)
12860 return NULL_TREE;
12861
12862 /* If return value of __stpcpy_chk is ignored,
12863 optimize into __strcpy_chk. */
12864 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
12865 if (!fn)
12866 return NULL_TREE;
12867
12868 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12869 }
12870
12871 if (! len || TREE_SIDE_EFFECTS (len))
12872 return NULL_TREE;
12873
12874 /* If c_strlen returned something, but not a constant,
12875 transform __strcpy_chk into __memcpy_chk. */
12876 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12877 if (!fn)
12878 return NULL_TREE;
12879
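/* Copy LEN + 1 bytes so the terminating NUL that strcpy
would have written is included. */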
12880 len = fold_convert_loc (loc, size_type_node, len);
12881 len = size_binop_loc (loc, PLUS_EXPR, len,
12882 build_int_cst (size_type_node, 1));
12883 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12884 build_call_expr_loc (loc, fn, 4,
12885 dest, src, len, size));
12886 }
12887 }
12888 else
12889 maxlen = len;
12890
12891 if (! tree_int_cst_lt (maxlen, size))
12892 return NULL_TREE;
12893 }
12894
12895 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12896 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
12897 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
12898 if (!fn)
12899 return NULL_TREE;
12900
12901 return build_call_expr_loc (loc, fn, 2, dest, src);
12902 }
12903
12904 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
12905 are the arguments to the call. If MAXLEN is not NULL, it is the maximum
12906 length passed as the third argument. IGNORE is true if the return value
12907 can be ignored. FCODE is the BUILT_IN_* code of the builtin. */
12908
12909 tree
12910 fold_builtin_stxncpy_chk (location_t loc, tree dest, tree src,
12911 tree len, tree size, tree maxlen, bool ignore,
12912 enum built_in_function fcode)
12913 {
12914 tree fn;
12915
12916 if (!validate_arg (dest, POINTER_TYPE)
12917 || !validate_arg (src, POINTER_TYPE)
12918 || !validate_arg (len, INTEGER_TYPE)
12919 || !validate_arg (size, INTEGER_TYPE))
12920 return NULL_TREE;
12921
12922 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
12923 {
12924 /* If return value of __stpncpy_chk is ignored,
12925 optimize into __strncpy_chk. */
12926 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
12927 if (fn)
12928 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
12929 }
12930
12931 if (! tree_fits_uhwi_p (size))
12932 return NULL_TREE;
12933
12934 if (! integer_all_onesp (size))
12935 {
12936 if (! tree_fits_uhwi_p (len))
12937 {
12938 /* If LEN is not constant, try MAXLEN too.
12939 For MAXLEN only allow optimizing into non-_ocs function
12940 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12941 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
12942 return NULL_TREE;
12943 }
12944 else
12945 maxlen = len;
12946
12947 if (tree_int_cst_lt (size, maxlen))
12948 return NULL_TREE;
12949 }
12950
12951 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
12952 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
12953 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
12954 if (!fn)
12955 return NULL_TREE;
12956
12957 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12958 }
12959
12960 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12961 are the arguments to the call. */
12962
12963 static tree
12964 fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
12965 tree src, tree size)
12966 {
12967 tree fn;
12968 const char *p;
12969
12970 if (!validate_arg (dest, POINTER_TYPE)
12971 || !validate_arg (src, POINTER_TYPE)
12972 || !validate_arg (size, INTEGER_TYPE))
12973 return NULL_TREE;
12974
12975 p = c_getstr (src);
12976 /* If the SRC parameter is "", return DEST. */
12977 if (p && *p == '\0')
12978 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12979
12980 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
12981 return NULL_TREE;
12982
12983 /* If __builtin_strcat_chk is used, assume strcat is available. */
12984 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
12985 if (!fn)
12986 return NULL_TREE;
12987
12988 return build_call_expr_loc (loc, fn, 2, dest, src);
12989 }
12990
12991 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12992 LEN, and SIZE. */
12993
12994 static tree
12995 fold_builtin_strncat_chk (location_t loc, tree fndecl,
12996 tree dest, tree src, tree len, tree size)
12997 {
12998 tree fn;
12999 const char *p;
13000
13001 if (!validate_arg (dest, POINTER_TYPE)
13002 || !validate_arg (src, POINTER_TYPE)
13003 || !validate_arg (len, INTEGER_TYPE)
13004 || !validate_arg (size, INTEGER_TYPE))
13005 return NULL_TREE;
13006
13007 p = c_getstr (src);
13008 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
13009 if (p && *p == '\0')
13010 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
13011 else if (integer_zerop (len))
13012 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
13013
13014 if (! tree_fits_uhwi_p (size))
13015 return NULL_TREE;
13016
13017 if (! integer_all_onesp (size))
13018 {
13019 tree src_len = c_strlen (src, 1);
13020 if (src_len
13021 && tree_fits_uhwi_p (src_len)
13022 && tree_fits_uhwi_p (len)
13023 && ! tree_int_cst_lt (len, src_len))
13024 {
13025 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
13026 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
13027 if (!fn)
13028 return NULL_TREE;
13029
13030 return build_call_expr_loc (loc, fn, 3, dest, src, size);
13031 }
13032 return NULL_TREE;
13033 }
13034
13035 /* If __builtin_strncat_chk is used, assume strncat is available. */
13036 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
13037 if (!fn)
13038 return NULL_TREE;
13039
13040 return build_call_expr_loc (loc, fn, 3, dest, src, len);
13041 }
13042
13043 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
13044 Return NULL_TREE if a normal call should be emitted rather than
13045 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
13046 or BUILT_IN_VSPRINTF_CHK. */
13047
13048 static tree
13049 fold_builtin_sprintf_chk_1 (location_t loc, int nargs, tree *args,
13050 enum built_in_function fcode)
13051 {
13052 tree dest, size, len, fn, fmt, flag;
13053 const char *fmt_str;
13054
13055 /* Verify the required arguments in the original call. */
13056 if (nargs < 4)
13057 return NULL_TREE;
13058 dest = args[0];
13059 if (!validate_arg (dest, POINTER_TYPE))
13060 return NULL_TREE;
13061 flag = args[1];
13062 if (!validate_arg (flag, INTEGER_TYPE))
13063 return NULL_TREE;
13064 size = args[2];
13065 if (!validate_arg (size, INTEGER_TYPE))
13066 return NULL_TREE;
13067 fmt = args[3];
13068 if (!validate_arg (fmt, POINTER_TYPE))
13069 return NULL_TREE;
13070
13071 if (! tree_fits_uhwi_p (size))
13072 return NULL_TREE;
13073
13074 len = NULL_TREE;
13075
13076 if (!init_target_chars ())
13077 return NULL_TREE;
13078
13079 /* Check whether the format is a literal string constant. */
13080 fmt_str = c_getstr (fmt);
13081 if (fmt_str != NULL)
13082 {
13083 /* If the format doesn't contain % args or %%, we know the size. */
13084 if (strchr (fmt_str, target_percent) == 0)
13085 {
13086 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
13087 len = build_int_cstu (size_type_node, strlen (fmt_str));
13088 }
13089 /* If the format is "%s" and first ... argument is a string literal,
13090 we know the size too. */
13091 else if (fcode == BUILT_IN_SPRINTF_CHK
13092 && strcmp (fmt_str, target_percent_s) == 0)
13093 {
13094 tree arg;
13095
13096 if (nargs == 5)
13097 {
13098 arg = args[4];
13099 if (validate_arg (arg, POINTER_TYPE))
13100 {
13101 len = c_strlen (arg, 1);
13102 if (! len || ! tree_fits_uhwi_p (len))
13103 len = NULL_TREE;
13104 }
13105 }
13106 }
13107 }
13108
13109 if (! integer_all_onesp (size))
13110 {
13111 if (! len || ! tree_int_cst_lt (len, size))
13112 return NULL_TREE;
13113 }
13114
13115 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
13116 or if the format doesn't contain % chars or is "%s". */
13117 if (! integer_zerop (flag))
13118 {
13119 if (fmt_str == NULL)
13120 return NULL_TREE;
13121 if (strchr (fmt_str, target_percent) != NULL
13122 && strcmp (fmt_str, target_percent_s))
13123 return NULL_TREE;
13124 }
13125
13126 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
13127 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
13128 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
13129 if (!fn)
13130 return NULL_TREE;
13131
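/* E.g. __sprintf_chk (str, flag, size, "%s", s) is rewritten as
sprintf (str, "%s", s): the first four arguments of the _chk call
are replaced by DEST and FMT, and any remaining arguments are
kept. */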
13132 return rewrite_call_expr_array (loc, nargs, args, 4, fn, 2, dest, fmt);
13133 }
13134
13135 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
13136 a normal call should be emitted rather than expanding the function
13137 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13138
13139 static tree
13140 fold_builtin_sprintf_chk (location_t loc, tree exp,
13141 enum built_in_function fcode)
13142 {
13143 return fold_builtin_sprintf_chk_1 (loc, call_expr_nargs (exp),
13144 CALL_EXPR_ARGP (exp), fcode);
13145 }
13146
13147 /* Fold a call EXP to __{,v}snprintf_chk having NARGS passed as ARGS.
13148 Return NULL_TREE if a normal call should be emitted rather than expanding
13149 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13150 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is the maximum length
13151 passed as the second argument. */
13152
13153 static tree
13154 fold_builtin_snprintf_chk_1 (location_t loc, int nargs, tree *args,
13155 tree maxlen, enum built_in_function fcode)
13156 {
13157 tree dest, size, len, fn, fmt, flag;
13158 const char *fmt_str;
13159
13160 /* Verify the required arguments in the original call. */
13161 if (nargs < 5)
13162 return NULL_TREE;
13163 dest = args[0];
13164 if (!validate_arg (dest, POINTER_TYPE))
13165 return NULL_TREE;
13166 len = args[1];
13167 if (!validate_arg (len, INTEGER_TYPE))
13168 return NULL_TREE;
13169 flag = args[2];
13170 if (!validate_arg (flag, INTEGER_TYPE))
13171 return NULL_TREE;
13172 size = args[3];
13173 if (!validate_arg (size, INTEGER_TYPE))
13174 return NULL_TREE;
13175 fmt = args[4];
13176 if (!validate_arg (fmt, POINTER_TYPE))
13177 return NULL_TREE;
13178
13179 if (! tree_fits_uhwi_p (size))
13180 return NULL_TREE;
13181
13182 if (! integer_all_onesp (size))
13183 {
13184 if (! tree_fits_uhwi_p (len))
13185 {
13186 /* If LEN is not constant, try MAXLEN too.
13187 For MAXLEN only allow optimizing into non-_ocs function
13188 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13189 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
13190 return NULL_TREE;
13191 }
13192 else
13193 maxlen = len;
13194
13195 if (tree_int_cst_lt (size, maxlen))
13196 return NULL_TREE;
13197 }
13198
13199 if (!init_target_chars ())
13200 return NULL_TREE;
13201
13202 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13203 or if the format doesn't contain % chars or is "%s". */
13204 if (! integer_zerop (flag))
13205 {
13206 fmt_str = c_getstr (fmt);
13207 if (fmt_str == NULL)
13208 return NULL_TREE;
13209 if (strchr (fmt_str, target_percent) != NULL
13210 && strcmp (fmt_str, target_percent_s))
13211 return NULL_TREE;
13212 }
13213
13214 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13215 available. */
13216 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
13217 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
13218 if (!fn)
13219 return NULL_TREE;
13220
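/* E.g. __snprintf_chk (str, n, flag, size, "%s", s) is rewritten as
snprintf (str, n, "%s", s). */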
13221 return rewrite_call_expr_array (loc, nargs, args, 5, fn, 3, dest, len, fmt);
13222 }
13223
13224 /* Fold a call EXP to __{,v}snprintf_chk. Return NULL_TREE if
13225 a normal call should be emitted rather than expanding the function
13226 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13227 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is the maximum length
13228 passed as the second argument. */
13229
13230 static tree
13231 fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
13232 enum built_in_function fcode)
13233 {
13234 return fold_builtin_snprintf_chk_1 (loc, call_expr_nargs (exp),
13235 CALL_EXPR_ARGP (exp), maxlen, fcode);
13236 }
13237
13238 /* Builtins with folding operations that operate on "..." arguments
13239 need special handling; we need to store the arguments in a convenient
13240 data structure before attempting any folding. Fortunately there are
13241 only a few builtins that fall into this category. FNDECL is the
13242 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
13243 result of the function call is ignored. */
13244
13245 static tree
13246 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
13247 bool ignore ATTRIBUTE_UNUSED)
13248 {
13249 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
13250 tree ret = NULL_TREE;
13251
13252 switch (fcode)
13253 {
13254 case BUILT_IN_SPRINTF_CHK:
13255 case BUILT_IN_VSPRINTF_CHK:
13256 ret = fold_builtin_sprintf_chk (loc, exp, fcode);
13257 break;
13258
13259 case BUILT_IN_SNPRINTF_CHK:
13260 case BUILT_IN_VSNPRINTF_CHK:
13261 ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
13262 break;
13263
13264 case BUILT_IN_FPCLASSIFY:
13265 ret = fold_builtin_fpclassify (loc, exp);
13266 break;
13267
13268 default:
13269 break;
13270 }
13271 if (ret)
13272 {
13273 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
13274 SET_EXPR_LOCATION (ret, loc);
13275 TREE_NO_WARNING (ret) = 1;
13276 return ret;
13277 }
13278 return NULL_TREE;
13279 }
13280
13281 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
13282 FMT and ARG are the arguments to the call; we don't fold cases with
13283 more than 2 arguments, and ARG may be null if this is a 1-argument case.
13284
13285 Return NULL_TREE if no simplification was possible, otherwise return the
13286 simplified form of the call as a tree. FCODE is the BUILT_IN_*
13287 code of the function to be simplified. */
13288
13289 static tree
13290 fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
13291 tree arg, bool ignore,
13292 enum built_in_function fcode)
13293 {
13294 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
13295 const char *fmt_str = NULL;
13296
13297 /* If the return value is used, don't do the transformation. */
13298 if (! ignore)
13299 return NULL_TREE;
13300
13301 /* Verify the required arguments in the original call. */
13302 if (!validate_arg (fmt, POINTER_TYPE))
13303 return NULL_TREE;
13304
13305 /* Check whether the format is a literal string constant. */
13306 fmt_str = c_getstr (fmt);
13307 if (fmt_str == NULL)
13308 return NULL_TREE;
13309
13310 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
13311 {
13312 /* If we're using an unlocked function, assume the other
13313 unlocked functions exist explicitly. */
13314 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
13315 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
13316 }
13317 else
13318 {
13319 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
13320 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
13321 }
13322
13323 if (!init_target_chars ())
13324 return NULL_TREE;
13325
13326 if (strcmp (fmt_str, target_percent_s) == 0
13327 || strchr (fmt_str, target_percent) == NULL)
13328 {
13329 const char *str;
13330
13331 if (strcmp (fmt_str, target_percent_s) == 0)
13332 {
13333 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
13334 return NULL_TREE;
13335
13336 if (!arg || !validate_arg (arg, POINTER_TYPE))
13337 return NULL_TREE;
13338
13339 str = c_getstr (arg);
13340 if (str == NULL)
13341 return NULL_TREE;
13342 }
13343 else
13344 {
13345 /* The format specifier doesn't contain any '%' characters. */
13346 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
13347 && arg)
13348 return NULL_TREE;
13349 str = fmt_str;
13350 }
13351
13352 /* If the string was "", printf does nothing. */
13353 if (str[0] == '\0')
13354 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
13355
13356 /* If the string has length of 1, call putchar. */
13357 if (str[1] == '\0')
13358 {
13359 /* Given printf("c"), (where c is any one character,)
13360 convert "c"[0] to an int and pass that to the replacement
13361 function. */
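/* E.g. printf ("x") becomes putchar (120), assuming an ASCII
execution character set. */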
13362 newarg = build_int_cst (integer_type_node, str[0]);
13363 if (fn_putchar)
13364 call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
13365 }
13366 else
13367 {
13368 /* If the string was "string\n", call puts("string"). */
13369 size_t len = strlen (str);
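/* The (size_t) (int) casts below guard against strings whose
length does not fit in a positive int. */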
13370 if ((unsigned char)str[len - 1] == target_newline
13371 && (size_t) (int) len == len
13372 && (int) len > 0)
13373 {
13374 char *newstr;
13375 tree offset_node, string_cst;
13376
13377 /* Create a NUL-terminated string that's one char shorter
13378 than the original, stripping off the trailing '\n'. */
13379 newarg = build_string_literal (len, str);
13380 string_cst = string_constant (newarg, &offset_node);
13381 gcc_checking_assert (string_cst
13382 && (TREE_STRING_LENGTH (string_cst)
13383 == (int) len)
13384 && integer_zerop (offset_node)
13385 && (unsigned char)
13386 TREE_STRING_POINTER (string_cst)[len - 1]
13387 == target_newline);
13388 /* build_string_literal creates a new STRING_CST,
13389 modify it in place to avoid double copying. */
13390 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
13391 newstr[len - 1] = '\0';
13392 if (fn_puts)
13393 call = build_call_expr_loc (loc, fn_puts, 1, newarg);
13394 }
13395 else
13396 /* We'd like to arrange to call fputs (string, stdout) here,
13397 but we need stdout and don't have a way to get it yet. */
13398 return NULL_TREE;
13399 }
13400 }
13401
13402 /* The other optimizations can be done only on the non-va_list variants. */
13403 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
13404 return NULL_TREE;
13405
13406 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
13407 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
13408 {
13409 if (!arg || !validate_arg (arg, POINTER_TYPE))
13410 return NULL_TREE;
13411 if (fn_puts)
13412 call = build_call_expr_loc (loc, fn_puts, 1, arg);
13413 }
13414
13415 /* If the format specifier was "%c", call __builtin_putchar(arg). */
13416 else if (strcmp (fmt_str, target_percent_c) == 0)
13417 {
13418 if (!arg || !validate_arg (arg, INTEGER_TYPE))
13419 return NULL_TREE;
13420 if (fn_putchar)
13421 call = build_call_expr_loc (loc, fn_putchar, 1, arg);
13422 }
13423
13424 if (!call)
13425 return NULL_TREE;
13426
13427 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
13428 }
13429
13430 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
13431 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
13432 more than 3 arguments, and ARG may be null in the 2-argument case.
13433
13434 Return NULL_TREE if no simplification was possible, otherwise return the
13435 simplified form of the call as a tree. FCODE is the BUILT_IN_*
13436 code of the function to be simplified. */
13437
13438 static tree
13439 fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
13440 tree fmt, tree arg, bool ignore,
13441 enum built_in_function fcode)
13442 {
13443 tree fn_fputc, fn_fputs, call = NULL_TREE;
13444 const char *fmt_str = NULL;
13445
13446 /* If the return value is used, don't do the transformation. */
13447 if (! ignore)
13448 return NULL_TREE;
13449
13450 /* Verify the required arguments in the original call. */
13451 if (!validate_arg (fp, POINTER_TYPE))
13452 return NULL_TREE;
13453 if (!validate_arg (fmt, POINTER_TYPE))
13454 return NULL_TREE;
13455
13456 /* Check whether the format is a literal string constant. */
13457 fmt_str = c_getstr (fmt);
13458 if (fmt_str == NULL)
13459 return NULL_TREE;
13460
13461 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
13462 {
13463 /* If we're using an unlocked function, assume the other
13464 unlocked functions exist explicitly. */
13465 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
13466 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
13467 }
13468 else
13469 {
13470 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
13471 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
13472 }
13473
13474 if (!init_target_chars ())
13475 return NULL_TREE;
13476
13477 /* If the format doesn't contain % args or %%, use fputs. */
13478 if (strchr (fmt_str, target_percent) == NULL)
13479 {
13480 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
13481 && arg)
13482 return NULL_TREE;
13483
13484 /* If the format specifier was "", fprintf does nothing. */
13485 if (fmt_str[0] == '\0')
13486 {
13487 /* If FP has side-effects, just wait until gimplification is
13488 done. */
13489 if (TREE_SIDE_EFFECTS (fp))
13490 return NULL_TREE;
13491
13492 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
13493 }
13494
13495 /* When "string" doesn't contain %, replace all cases of
13496 fprintf (fp, string) with fputs (string, fp). The fputs
13497 builtin will take care of special cases like length == 1. */
13498 if (fn_fputs)
13499 call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
13500 }
13501
13502 /* The other optimizations can be done only on the non-va_list variants. */
13503 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
13504 return NULL_TREE;
13505
13506 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
13507 else if (strcmp (fmt_str, target_percent_s) == 0)
13508 {
13509 if (!arg || !validate_arg (arg, POINTER_TYPE))
13510 return NULL_TREE;
13511 if (fn_fputs)
13512 call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
13513 }
13514
13515 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
13516 else if (strcmp (fmt_str, target_percent_c) == 0)
13517 {
13518 if (!arg || !validate_arg (arg, INTEGER_TYPE))
13519 return NULL_TREE;
13520 if (fn_fputc)
13521 call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
13522 }
13523
13524 if (!call)
13525 return NULL_TREE;
13526 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
13527 }
13528
13529 /* Initialize format string characters in the target charset. */
13530
13531 static bool
13532 init_target_chars (void)
13533 {
13534 static bool init;
13535 if (!init)
13536 {
13537 target_newline = lang_hooks.to_target_charset ('\n');
13538 target_percent = lang_hooks.to_target_charset ('%');
13539 target_c = lang_hooks.to_target_charset ('c');
13540 target_s = lang_hooks.to_target_charset ('s');
13541 if (target_newline == 0 || target_percent == 0 || target_c == 0
13542 || target_s == 0)
13543 return false;
13544
13545 target_percent_c[0] = target_percent;
13546 target_percent_c[1] = target_c;
13547 target_percent_c[2] = '\0';
13548
13549 target_percent_s[0] = target_percent;
13550 target_percent_s[1] = target_s;
13551 target_percent_s[2] = '\0';
13552
13553 target_percent_s_newline[0] = target_percent;
13554 target_percent_s_newline[1] = target_s;
13555 target_percent_s_newline[2] = target_newline;
13556 target_percent_s_newline[3] = '\0';
13557
13558 init = true;
13559 }
13560 return true;
13561 }
13562
13563 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
13564 and that no overflow/underflow occurred. INEXACT is true if M was
13565 not exactly calculated. TYPE is the tree type for the result. The
13566 caller must clear the MPFR flags before calculating M, so that any
13567 flag set afterwards can be inspected here. Return NULL_TREE if any
13568 check fails. */
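/* The expected calling pattern, as in do_mpfr_arg1 below:

mpfr_clear_flags ();
inexact = func (m, m, rnd);
result = do_mpfr_ckconv (m, type, inexact); */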
13569
13570 static tree
13571 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
13572 {
13573 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13574 overflow/underflow occurred. If -frounding-math, proceed iff the
13575 result of calling FUNC was exact. */
13576 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
13577 && (!flag_rounding_math || !inexact))
13578 {
13579 REAL_VALUE_TYPE rr;
13580
13581 real_from_mpfr (&rr, m, type, GMP_RNDN);
13582 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
13583 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13584 but the mpfr_t is not, then we underflowed in the
13585 conversion. */
13586 if (real_isfinite (&rr)
13587 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
13588 {
13589 REAL_VALUE_TYPE rmode;
13590
13591 real_convert (&rmode, TYPE_MODE (type), &rr);
13592 /* Proceed iff the specified mode can hold the value. */
13593 if (real_identical (&rmode, &rr))
13594 return build_real (type, rmode);
13595 }
13596 }
13597 return NULL_TREE;
13598 }
13599
13600 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
13601 number and that no overflow/underflow occurred. INEXACT is true if
13602 M was not exactly calculated. TYPE is the tree type for the result.
13603 The caller must clear the MPFR flags before calculating M, so that
13604 any flag set afterwards can be inspected here. Return NULL_TREE if
13605 any check fails; if FORCE_CONVERT is true, the checks are
13606 bypassed. */
13607
13608 static tree
13609 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
13610 {
13611 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13612 overflow/underflow occurred. If -frounding-math, proceed iff the
13613 result of calling FUNC was exact. */
13614 if (force_convert
13615 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
13616 && !mpfr_overflow_p () && !mpfr_underflow_p ()
13617 && (!flag_rounding_math || !inexact)))
13618 {
13619 REAL_VALUE_TYPE re, im;
13620
13621 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
13622 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
13623 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
13624 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13625 but the mpfr_t is not, then we underflowed in the
13626 conversion. */
13627 if (force_convert
13628 || (real_isfinite (&re) && real_isfinite (&im)
13629 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
13630 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
13631 {
13632 REAL_VALUE_TYPE re_mode, im_mode;
13633
13634 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
13635 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
13636 /* Proceed iff the specified mode can hold the value. */
13637 if (force_convert
13638 || (real_identical (&re_mode, &re)
13639 && real_identical (&im_mode, &im)))
13640 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
13641 build_real (TREE_TYPE (type), im_mode));
13642 }
13643 }
13644 return NULL_TREE;
13645 }
13646
13647 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
13648 FUNC on it and return the resulting value as a tree with type TYPE.
13649 If MIN and/or MAX are not NULL, then the supplied ARG must be
13650 within those bounds. If INCLUSIVE is true, then MIN/MAX are
13651 acceptable values, otherwise they are not. The mpfr precision is
13652 set to the precision of TYPE. We assume that function FUNC returns
13653 zero if the result could be calculated exactly within the requested
13654 precision. */
13655
13656 static tree
13657 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
13658 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
13659 bool inclusive)
13660 {
13661 tree result = NULL_TREE;
13662
13663 STRIP_NOPS (arg);
13664
13665 /* To proceed, MPFR must exactly represent the target floating point
13666 format, which only happens when the target base equals two. */
13667 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13668 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
13669 {
13670 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13671
13672 if (real_isfinite (ra)
13673 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
13674 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
13675 {
13676 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13677 const int prec = fmt->p;
13678 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13679 int inexact;
13680 mpfr_t m;
13681
13682 mpfr_init2 (m, prec);
13683 mpfr_from_real (m, ra, GMP_RNDN);
13684 mpfr_clear_flags ();
13685 inexact = func (m, m, rnd);
13686 result = do_mpfr_ckconv (m, type, inexact);
13687 mpfr_clear (m);
13688 }
13689 }
13690
13691 return result;
13692 }
13693
13694 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
13695 FUNC on it and return the resulting value as a tree with type TYPE.
13696 The mpfr precision is set to the precision of TYPE. We assume that
13697 function FUNC returns zero if the result could be calculated
13698 exactly within the requested precision. */
13699
13700 static tree
13701 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
13702 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13703 {
13704 tree result = NULL_TREE;
13705
13706 STRIP_NOPS (arg1);
13707 STRIP_NOPS (arg2);
13708
13709 /* To proceed, MPFR must exactly represent the target floating point
13710 format, which only happens when the target base equals two. */
13711 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13712 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13713 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13714 {
13715 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13716 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13717
13718 if (real_isfinite (ra1) && real_isfinite (ra2))
13719 {
13720 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13721 const int prec = fmt->p;
13722 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13723 int inexact;
13724 mpfr_t m1, m2;
13725
13726 mpfr_inits2 (prec, m1, m2, NULL);
13727 mpfr_from_real (m1, ra1, GMP_RNDN);
13728 mpfr_from_real (m2, ra2, GMP_RNDN);
13729 mpfr_clear_flags ();
13730 inexact = func (m1, m1, m2, rnd);
13731 result = do_mpfr_ckconv (m1, type, inexact);
13732 mpfr_clears (m1, m2, NULL);
13733 }
13734 }
13735
13736 return result;
13737 }
13738
13739 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
13740 FUNC on it and return the resulting value as a tree with type TYPE.
13741 The mpfr precision is set to the precision of TYPE. We assume that
13742 function FUNC returns zero if the result could be calculated
13743 exactly within the requested precision. */
13744
13745 static tree
13746 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
13747 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13748 {
13749 tree result = NULL_TREE;
13750
13751 STRIP_NOPS (arg1);
13752 STRIP_NOPS (arg2);
13753 STRIP_NOPS (arg3);
13754
13755 /* To proceed, MPFR must exactly represent the target floating point
13756 format, which only happens when the target base equals two. */
13757 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13758 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13759 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
13760 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
13761 {
13762 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13763 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13764 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
13765
13766 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
13767 {
13768 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13769 const int prec = fmt->p;
13770 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13771 int inexact;
13772 mpfr_t m1, m2, m3;
13773
13774 mpfr_inits2 (prec, m1, m2, m3, NULL);
13775 mpfr_from_real (m1, ra1, GMP_RNDN);
13776 mpfr_from_real (m2, ra2, GMP_RNDN);
13777 mpfr_from_real (m3, ra3, GMP_RNDN);
13778 mpfr_clear_flags ();
13779 inexact = func (m1, m1, m2, m3, rnd);
13780 result = do_mpfr_ckconv (m1, type, inexact);
13781 mpfr_clears (m1, m2, m3, NULL);
13782 }
13783 }
13784
13785 return result;
13786 }
13787
13788 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
13789 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
13790 If ARG_SINP and ARG_COSP are NULL then the result is returned
13791 as a complex value.
13792 The type is taken from the type of ARG and is used for setting the
13793 precision of the calculation and results. */
13794
13795 static tree
13796 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
13797 {
13798 tree const type = TREE_TYPE (arg);
13799 tree result = NULL_TREE;
13800
13801 STRIP_NOPS (arg);
13802
13803 /* To proceed, MPFR must exactly represent the target floating point
13804 format, which only happens when the target base equals two. */
13805 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13806 && TREE_CODE (arg) == REAL_CST
13807 && !TREE_OVERFLOW (arg))
13808 {
13809 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13810
13811 if (real_isfinite (ra))
13812 {
13813 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13814 const int prec = fmt->p;
13815 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13816 tree result_s, result_c;
13817 int inexact;
13818 mpfr_t m, ms, mc;
13819
13820 mpfr_inits2 (prec, m, ms, mc, NULL);
13821 mpfr_from_real (m, ra, GMP_RNDN);
13822 mpfr_clear_flags ();
13823 inexact = mpfr_sin_cos (ms, mc, m, rnd);
13824 result_s = do_mpfr_ckconv (ms, type, inexact);
13825 result_c = do_mpfr_ckconv (mc, type, inexact);
13826 mpfr_clears (m, ms, mc, NULL);
13827 if (result_s && result_c)
13828 {
13829 /* If we are to return the result as a complex value, do so. */
13830 if (!arg_sinp && !arg_cosp)
13831 return build_complex (build_complex_type (type),
13832 result_c, result_s);
13833
13834 /* Dereference the sin/cos pointer arguments. */
13835 arg_sinp = build_fold_indirect_ref (arg_sinp);
13836 arg_cosp = build_fold_indirect_ref (arg_cosp);
              /* Proceed iff valid pointer types were passed in.  */
13838 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
13839 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
13840 {
13841 /* Set the values. */
13842 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
13843 result_s);
13844 TREE_SIDE_EFFECTS (result_s) = 1;
13845 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
13846 result_c);
13847 TREE_SIDE_EFFECTS (result_c) = 1;
13848 /* Combine the assignments into a compound expr. */
13849 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13850 result_s, result_c));
13851 }
13852 }
13853 }
13854 }
13855 return result;
13856 }
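
/* An illustrative sketch of the two ways do_mpfr_sincos can be used:
   for a constant sincos(x, &s, &c) the pointer arguments are passed
   through, while for cexpi(x) both are NULL_TREE and the cos/sin pair
   comes back as a single complex constant.  The wrapper names are
   hypothetical.  */

static tree
fold_const_sincos_sketch (tree arg, tree sinp, tree cosp)
{
  /* Produces a COMPOUND_EXPR storing sin(arg) and cos(arg), or
     NULL_TREE when folding is not possible.  */
  return do_mpfr_sincos (arg, sinp, cosp);
}

static tree
fold_const_cexpi_sketch (tree arg)
{
  /* Produces a COMPLEX_CST with real part cos(arg) and imaginary
     part sin(arg).  */
  return do_mpfr_sincos (arg, NULL_TREE, NULL_TREE);
}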
13857
13858 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13859 two-argument mpfr order N Bessel function FUNC on them and return
13860 the resulting value as a tree with type TYPE. The mpfr precision
13861 is set to the precision of TYPE. We assume that function FUNC
13862 returns zero if the result could be calculated exactly within the
13863 requested precision. */
13864 static tree
13865 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
13866 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
13867 const REAL_VALUE_TYPE *min, bool inclusive)
13868 {
13869 tree result = NULL_TREE;
13870
13871 STRIP_NOPS (arg1);
13872 STRIP_NOPS (arg2);
13873
13874 /* To proceed, MPFR must exactly represent the target floating point
13875 format, which only happens when the target base equals two. */
13876 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13877 && tree_fits_shwi_p (arg1)
13878 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13879 {
13880 const HOST_WIDE_INT n = tree_to_shwi (arg1);
13881 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
13882
      if (n == (long) n
          && real_isfinite (ra)
          && (!min || real_compare (inclusive ? GE_EXPR : GT_EXPR, ra, min)))
13886 {
13887 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13888 const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13890 int inexact;
13891 mpfr_t m;
13892
13893 mpfr_init2 (m, prec);
13894 mpfr_from_real (m, ra, GMP_RNDN);
13895 mpfr_clear_flags ();
13896 inexact = func (m, n, m, rnd);
13897 result = do_mpfr_ckconv (m, type, inexact);
13898 mpfr_clear (m);
13899 }
13900 }
13901
13902 return result;
13903 }
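
/* An illustrative sketch: a constant jn(n, x) call could be folded
   through do_mpfr_bessel_n, since mpfr_jn takes the order as a long
   and so matches the callback type above.  jn is defined for every
   finite x, hence no MIN bound; a yn-style folder would instead pass
   a lower bound.  The wrapper name is hypothetical.  */

static tree
fold_const_bessel_jn_sketch (tree arg0, tree arg1, tree type)
{
  return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn,
                           /*min=*/ NULL, /*inclusive=*/ false);
}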
13904
13905 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13906 the pointer *(ARG_QUO) and return the result. The type is taken
13907 from the type of ARG0 and is used for setting the precision of the
13908 calculation and results. */
13909
13910 static tree
13911 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
13912 {
13913 tree const type = TREE_TYPE (arg0);
13914 tree result = NULL_TREE;
13915
13916 STRIP_NOPS (arg0);
13917 STRIP_NOPS (arg1);
13918
13919 /* To proceed, MPFR must exactly represent the target floating point
13920 format, which only happens when the target base equals two. */
13921 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13922 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
13923 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
13924 {
13925 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
13926 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
13927
13928 if (real_isfinite (ra0) && real_isfinite (ra1))
13929 {
13930 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13931 const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13933 tree result_rem;
13934 long integer_quo;
13935 mpfr_t m0, m1;
13936
13937 mpfr_inits2 (prec, m0, m1, NULL);
13938 mpfr_from_real (m0, ra0, GMP_RNDN);
13939 mpfr_from_real (m1, ra1, GMP_RNDN);
13940 mpfr_clear_flags ();
13941 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
13942 /* Remquo is independent of the rounding mode, so pass
13943 inexact=0 to do_mpfr_ckconv(). */
13944 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
13945 mpfr_clears (m0, m1, NULL);
13946 if (result_rem)
13947 {
              /* MPFR calculates quo in the host's long, so it may
                 return more bits in quo than the target int can hold
                 if sizeof (host long) > sizeof (target int).  This can
                 happen even for native compilers in LP64 mode.  In
                 these cases, reduce quo modulo the largest power of
                 two that the target int can hold while leaving one
                 bit for the sign; e.g. with a 64-bit host long and a
                 32-bit target int, quo is taken modulo 1 << 31.  */
13955 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
13956 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
13957
13958 /* Dereference the quo pointer argument. */
13959 arg_quo = build_fold_indirect_ref (arg_quo);
13960 /* Proceed iff a valid pointer type was passed in. */
13961 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
13962 {
13963 /* Set the value. */
13964 tree result_quo
13965 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
13966 build_int_cst (TREE_TYPE (arg_quo),
13967 integer_quo));
13968 TREE_SIDE_EFFECTS (result_quo) = 1;
13969 /* Combine the quo assignment with the rem. */
13970 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13971 result_quo, result_rem));
13972 }
13973 }
13974 }
13975 }
13976 return result;
13977 }
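
/* An illustrative sketch of the contract: for a constant
   remquo(x, y, &quo) a folder would simply hand the three arguments
   through, e.g.

     ret = do_mpfr_remquo (arg0, arg1, arg2);

   and receive a COMPOUND_EXPR that first stores the low quotient
   bits through the int pointer and then yields the remainder, or
   NULL_TREE when folding is not safe.  (Hypothetical call site.)  */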
13978
13979 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13980 resulting value as a tree with type TYPE. The mpfr precision is
13981 set to the precision of TYPE. We assume that this mpfr function
13982 returns zero if the result could be calculated exactly within the
13983 requested precision. In addition, the integer pointer represented
13984 by ARG_SG will be dereferenced and set to the appropriate signgam
13985 (-1,1) value. */
13986
13987 static tree
13988 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
13989 {
13990 tree result = NULL_TREE;
13991
13992 STRIP_NOPS (arg);
13993
13994 /* To proceed, MPFR must exactly represent the target floating point
13995 format, which only happens when the target base equals two. Also
13996 verify ARG is a constant and that ARG_SG is an int pointer. */
13997 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13998 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
13999 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
14000 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
14001 {
14002 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
14003
14004 /* In addition to NaN and Inf, the argument cannot be zero or a
14005 negative integer. */
14006 if (real_isfinite (ra)
14007 && ra->cl != rvc_zero
14008 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
14009 {
14010 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
14011 const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
14013 int inexact, sg;
14014 mpfr_t m;
14015 tree result_lg;
14016
14017 mpfr_init2 (m, prec);
14018 mpfr_from_real (m, ra, GMP_RNDN);
14019 mpfr_clear_flags ();
14020 inexact = mpfr_lgamma (m, &sg, m, rnd);
14021 result_lg = do_mpfr_ckconv (m, type, inexact);
14022 mpfr_clear (m);
14023 if (result_lg)
14024 {
14025 tree result_sg;
14026
14027 /* Dereference the arg_sg pointer argument. */
14028 arg_sg = build_fold_indirect_ref (arg_sg);
14029 /* Assign the signgam value into *arg_sg. */
14030 result_sg = fold_build2 (MODIFY_EXPR,
14031 TREE_TYPE (arg_sg), arg_sg,
14032 build_int_cst (TREE_TYPE (arg_sg), sg));
14033 TREE_SIDE_EFFECTS (result_sg) = 1;
14034 /* Combine the signgam assignment with the lgamma result. */
14035 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
14036 result_sg, result_lg));
14037 }
14038 }
14039 }
14040
14041 return result;
14042 }
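
/* An illustrative sketch of the contract: a constant lgamma_r(x, &sg)
   call folds via

     ret = do_mpfr_lgamma_r (arg0, arg1, type);

   giving a COMPOUND_EXPR that first assigns the signgam value (-1 or
   1) through the int pointer and then yields the lgamma result.
   (Hypothetical call site, shown only to make the contract
   concrete.)  */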
14043
14044 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
14045 function FUNC on it and return the resulting value as a tree with
14046 type TYPE. The mpfr precision is set to the precision of TYPE. We
14047 assume that function FUNC returns zero if the result could be
14048 calculated exactly within the requested precision. */
14049
14050 static tree
14051 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
14052 {
14053 tree result = NULL_TREE;
14054
14055 STRIP_NOPS (arg);
14056
14057 /* To proceed, MPFR must exactly represent the target floating point
14058 format, which only happens when the target base equals two. */
14059 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
14060 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
14061 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
14062 {
14063 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
14064 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
14065
14066 if (real_isfinite (re) && real_isfinite (im))
14067 {
14068 const struct real_format *const fmt =
14069 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
14070 const int prec = fmt->p;
14071 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
14072 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
14073 int inexact;
14074 mpc_t m;
14075
14076 mpc_init2 (m, prec);
14077 mpfr_from_real (mpc_realref (m), re, rnd);
14078 mpfr_from_real (mpc_imagref (m), im, rnd);
14079 mpfr_clear_flags ();
14080 inexact = func (m, m, crnd);
14081 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
14082 mpc_clear (m);
14083 }
14084 }
14085
14086 return result;
14087 }
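
/* An illustrative sketch: any one-argument mpc entry point with the
   int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t) shape plugs in here, for
   example mpc_cos for a constant ccos() call.  The wrapper name is
   hypothetical.  */

static tree
fold_const_ccos_sketch (tree arg, tree type)
{
  return do_mpc_arg1 (arg, type, mpc_cos);
}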
14088
/* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
   mpc function FUNC on them and return the resulting value as a tree
14091 with type TYPE. The mpfr precision is set to the precision of
14092 TYPE. We assume that function FUNC returns zero if the result
14093 could be calculated exactly within the requested precision. If
14094 DO_NONFINITE is true, then fold expressions containing Inf or NaN
14095 in the arguments and/or results. */
14096
14097 tree
14098 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
14099 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
14100 {
14101 tree result = NULL_TREE;
14102
14103 STRIP_NOPS (arg0);
14104 STRIP_NOPS (arg1);
14105
14106 /* To proceed, MPFR must exactly represent the target floating point
14107 format, which only happens when the target base equals two. */
14108 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
14109 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
14110 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
14111 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
14112 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
14113 {
14114 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
14115 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
14116 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
14117 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
14118
14119 if (do_nonfinite
14120 || (real_isfinite (re0) && real_isfinite (im0)
14121 && real_isfinite (re1) && real_isfinite (im1)))
14122 {
14123 const struct real_format *const fmt =
14124 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
14125 const int prec = fmt->p;
14126 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
14127 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
14128 int inexact;
14129 mpc_t m0, m1;
14130
14131 mpc_init2 (m0, prec);
14132 mpc_init2 (m1, prec);
14133 mpfr_from_real (mpc_realref (m0), re0, rnd);
14134 mpfr_from_real (mpc_imagref (m0), im0, rnd);
14135 mpfr_from_real (mpc_realref (m1), re1, rnd);
14136 mpfr_from_real (mpc_imagref (m1), im1, rnd);
14137 mpfr_clear_flags ();
14138 inexact = func (m0, m0, m1, crnd);
14139 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
14140 mpc_clear (m0);
14141 mpc_clear (m1);
14142 }
14143 }
14144
14145 return result;
14146 }
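
/* An illustrative sketch: a folder for a constant cpow() call could
   pass mpc_pow here, tying DO_NONFINITE to
   flag_unsafe_math_optimizations so that Inf/NaN operands are only
   folded when the user has allowed it.  The wrapper name is
   hypothetical.  */

static tree
fold_const_cpow_sketch (tree arg0, tree arg1, tree type)
{
  return do_mpc_arg2 (arg0, arg1, type,
                      /*do_nonfinite=*/ flag_unsafe_math_optimizations,
                      mpc_pow);
}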
14147
14148 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
14149 a normal call should be emitted rather than expanding the function
14150 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
14151
14152 static tree
14153 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
14154 {
14155 int nargs = gimple_call_num_args (stmt);
14156
14157 return fold_builtin_sprintf_chk_1 (gimple_location (stmt), nargs,
14158 (nargs > 0
14159 ? gimple_call_arg_ptr (stmt, 0)
14160 : &error_mark_node), fcode);
14161 }
14162
/* Fold a call STMT to __{,v}snprintf_chk.  Return NULL_TREE if
   a normal call should be emitted rather than expanding the function
   inline.  FCODE is either BUILT_IN_SNPRINTF_CHK or
   BUILT_IN_VSNPRINTF_CHK.  If MAXLEN is not NULL, it is the maximum
   length passed as the second argument.  */
14168
14169 tree
14170 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
14171 enum built_in_function fcode)
14172 {
14173 int nargs = gimple_call_num_args (stmt);
14174
14175 return fold_builtin_snprintf_chk_1 (gimple_location (stmt), nargs,
14176 (nargs > 0
14177 ? gimple_call_arg_ptr (stmt, 0)
14178 : &error_mark_node), maxlen, fcode);
14179 }
14180
14181 /* Builtins with folding operations that operate on "..." arguments
14182 need special handling; we need to store the arguments in a convenient
14183 data structure before attempting any folding. Fortunately there are
14184 only a few builtins that fall into this category. FNDECL is the
14185 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
14186 result of the function call is ignored. */
14187
14188 static tree
14189 gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
14190 bool ignore ATTRIBUTE_UNUSED)
14191 {
14192 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
14193 tree ret = NULL_TREE;
14194
14195 switch (fcode)
14196 {
14197 case BUILT_IN_SPRINTF_CHK:
14198 case BUILT_IN_VSPRINTF_CHK:
14199 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
14200 break;
14201
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
      break;
14205
14206 default:
14207 break;
14208 }
14209 if (ret)
14210 {
14211 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
14212 TREE_NO_WARNING (ret) = 1;
14213 return ret;
14214 }
14215 return NULL_TREE;
14216 }
14217
14218 /* A wrapper function for builtin folding that prevents warnings for
14219 "statement without effect" and the like, caused by removing the
14220 call node earlier than the warning is generated. */
14221
14222 tree
14223 fold_call_stmt (gimple stmt, bool ignore)
14224 {
14225 tree ret = NULL_TREE;
14226 tree fndecl = gimple_call_fndecl (stmt);
14227 location_t loc = gimple_location (stmt);
14228 if (fndecl
14229 && TREE_CODE (fndecl) == FUNCTION_DECL
14230 && DECL_BUILT_IN (fndecl)
14231 && !gimple_call_va_arg_pack_p (stmt))
14232 {
14233 int nargs = gimple_call_num_args (stmt);
14234 tree *args = (nargs > 0
14235 ? gimple_call_arg_ptr (stmt, 0)
14236 : &error_mark_node);
14237
14238 if (avoid_folding_inline_builtin (fndecl))
14239 return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
        return targetm.fold_builtin (fndecl, nargs, args, ignore);
14244 else
14245 {
14246 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
14247 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
14248 if (!ret)
14249 ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
14250 if (ret)
14251 {
14252 /* Propagate location information from original call to
14253 expansion of builtin. Otherwise things like
14254 maybe_emit_chk_warning, that operate on the expansion
14255 of a builtin, will use the wrong location information. */
14256 if (gimple_has_location (stmt))
14257 {
14258 tree realret = ret;
14259 if (TREE_CODE (ret) == NOP_EXPR)
14260 realret = TREE_OPERAND (ret, 0);
14261 if (CAN_HAVE_LOCATION_P (realret)
14262 && !EXPR_HAS_LOCATION (realret))
14263 SET_EXPR_LOCATION (realret, loc);
14264 return realret;
14265 }
14266 return ret;
14267 }
14268 }
14269 }
14270 return NULL_TREE;
14271 }
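
/* An illustrative sketch of a typical call site: the return value of
   the call is considered ignored when there is no LHS, e.g.

     tree res = fold_call_stmt (stmt, gimple_call_lhs (stmt) == NULL_TREE);

   (Hypothetical fragment, shown only to illustrate the IGNORE
   parameter.)  */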
14272
14273 /* Look up the function in builtin_decl that corresponds to DECL
14274 and set ASMSPEC as its user assembler name. DECL must be a
14275 function decl that declares a builtin. */
14276
14277 void
14278 set_builtin_user_assembler_name (tree decl, const char *asmspec)
14279 {
14280 tree builtin;
14281 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
14282 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
14283 && asmspec != 0);
14284
14285 builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
14286 set_user_assembler_name (builtin, asmspec);
14287 switch (DECL_FUNCTION_CODE (decl))
14288 {
14289 case BUILT_IN_MEMCPY:
14290 init_block_move_fn (asmspec);
14291 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
14292 break;
14293 case BUILT_IN_MEMSET:
14294 init_block_clear_fn (asmspec);
14295 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
14296 break;
14297 case BUILT_IN_MEMMOVE:
14298 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
14299 break;
14300 case BUILT_IN_MEMCMP:
14301 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
14302 break;
14303 case BUILT_IN_ABORT:
14304 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
14305 break;
14306 case BUILT_IN_FFS:
14307 if (INT_TYPE_SIZE < BITS_PER_WORD)
14308 {
14309 set_user_assembler_libfunc ("ffs", asmspec);
14310 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
14311 MODE_INT, 0), "ffs");
14312 }
14313 break;
14314 default:
14315 break;
14316 }
14317 }
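
/* An illustrative sketch of what triggers this hook: user code that
   redirects a builtin with an asm label, e.g.

     extern void *memcpy (void *, const void *, __SIZE_TYPE__)
       __asm__ ("my_memcpy");

   after which both expanded block moves and the memcpy libfunc emit
   calls to my_memcpy.  (Hypothetical example declaration.)  */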
14318
14319 /* Return true if DECL is a builtin that expands to a constant or similarly
14320 simple code. */
14321 bool
14322 is_simple_builtin (tree decl)
14323 {
14324 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
14325 switch (DECL_FUNCTION_CODE (decl))
14326 {
14327 /* Builtins that expand to constants. */
14328 case BUILT_IN_CONSTANT_P:
14329 case BUILT_IN_EXPECT:
14330 case BUILT_IN_OBJECT_SIZE:
14331 case BUILT_IN_UNREACHABLE:
14332 /* Simple register moves or loads from stack. */
14333 case BUILT_IN_ASSUME_ALIGNED:
14334 case BUILT_IN_RETURN_ADDRESS:
14335 case BUILT_IN_EXTRACT_RETURN_ADDR:
14336 case BUILT_IN_FROB_RETURN_ADDR:
14337 case BUILT_IN_RETURN:
14338 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
14339 case BUILT_IN_FRAME_ADDRESS:
14340 case BUILT_IN_VA_END:
14341 case BUILT_IN_STACK_SAVE:
14342 case BUILT_IN_STACK_RESTORE:
14343 /* Exception state returns or moves registers around. */
14344 case BUILT_IN_EH_FILTER:
14345 case BUILT_IN_EH_POINTER:
14346 case BUILT_IN_EH_COPY_VALUES:
14347 return true;
14348
14349 default:
14350 return false;
14351 }
14352
14353 return false;
14354 }
14355
/* Return true if DECL is a builtin that is not expensive, i.e., one
   that is most probably expanded inline into reasonably simple code.
   This is a superset of is_simple_builtin.  */
14359 bool
14360 is_inexpensive_builtin (tree decl)
14361 {
14362 if (!decl)
14363 return false;
14364 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
14365 return true;
14366 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
14367 switch (DECL_FUNCTION_CODE (decl))
14368 {
14369 case BUILT_IN_ABS:
14370 case BUILT_IN_ALLOCA:
14371 case BUILT_IN_ALLOCA_WITH_ALIGN:
14372 case BUILT_IN_BSWAP16:
14373 case BUILT_IN_BSWAP32:
14374 case BUILT_IN_BSWAP64:
14375 case BUILT_IN_CLZ:
14376 case BUILT_IN_CLZIMAX:
14377 case BUILT_IN_CLZL:
14378 case BUILT_IN_CLZLL:
14379 case BUILT_IN_CTZ:
14380 case BUILT_IN_CTZIMAX:
14381 case BUILT_IN_CTZL:
14382 case BUILT_IN_CTZLL:
14383 case BUILT_IN_FFS:
14384 case BUILT_IN_FFSIMAX:
14385 case BUILT_IN_FFSL:
14386 case BUILT_IN_FFSLL:
14387 case BUILT_IN_IMAXABS:
14388 case BUILT_IN_FINITE:
14389 case BUILT_IN_FINITEF:
14390 case BUILT_IN_FINITEL:
14391 case BUILT_IN_FINITED32:
14392 case BUILT_IN_FINITED64:
14393 case BUILT_IN_FINITED128:
14394 case BUILT_IN_FPCLASSIFY:
14395 case BUILT_IN_ISFINITE:
14396 case BUILT_IN_ISINF_SIGN:
14397 case BUILT_IN_ISINF:
14398 case BUILT_IN_ISINFF:
14399 case BUILT_IN_ISINFL:
14400 case BUILT_IN_ISINFD32:
14401 case BUILT_IN_ISINFD64:
14402 case BUILT_IN_ISINFD128:
14403 case BUILT_IN_ISNAN:
14404 case BUILT_IN_ISNANF:
14405 case BUILT_IN_ISNANL:
14406 case BUILT_IN_ISNAND32:
14407 case BUILT_IN_ISNAND64:
14408 case BUILT_IN_ISNAND128:
14409 case BUILT_IN_ISNORMAL:
14410 case BUILT_IN_ISGREATER:
14411 case BUILT_IN_ISGREATEREQUAL:
14412 case BUILT_IN_ISLESS:
14413 case BUILT_IN_ISLESSEQUAL:
14414 case BUILT_IN_ISLESSGREATER:
14415 case BUILT_IN_ISUNORDERED:
14416 case BUILT_IN_VA_ARG_PACK:
14417 case BUILT_IN_VA_ARG_PACK_LEN:
14418 case BUILT_IN_VA_COPY:
14419 case BUILT_IN_TRAP:
14420 case BUILT_IN_SAVEREGS:
14421 case BUILT_IN_POPCOUNTL:
14422 case BUILT_IN_POPCOUNTLL:
14423 case BUILT_IN_POPCOUNTIMAX:
14424 case BUILT_IN_POPCOUNT:
14425 case BUILT_IN_PARITYL:
14426 case BUILT_IN_PARITYLL:
14427 case BUILT_IN_PARITYIMAX:
14428 case BUILT_IN_PARITY:
14429 case BUILT_IN_LABS:
14430 case BUILT_IN_LLABS:
14431 case BUILT_IN_PREFETCH:
14432 return true;
14433
14434 default:
14435 return is_simple_builtin (decl);
14436 }
14437
14438 return false;
14439 }
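
/* An illustrative sketch: a pass deciding whether a call is cheap
   enough to duplicate might combine these predicates as below.  The
   function name is hypothetical.  */

static bool
call_is_cheap_sketch (gimple stmt)
{
  tree fndecl = gimple_call_fndecl (stmt);
  return fndecl != NULL_TREE && is_inexpensive_builtin (fndecl);
}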