1 /* Expand builtin functions.
2 Copyright (C) 1988-2016 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* Legacy warning! Please add no further builtin simplifications here
21 (apart from pure constant folding) - builtin simplifications should go
22 to match.pd or gimple-fold.c instead. */
23
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "backend.h"
28 #include "target.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "memmodel.h"
32 #include "gimple.h"
33 #include "predict.h"
34 #include "tm_p.h"
35 #include "stringpool.h"
36 #include "tree-vrp.h"
37 #include "tree-ssanames.h"
38 #include "expmed.h"
39 #include "optabs.h"
40 #include "emit-rtl.h"
41 #include "recog.h"
42 #include "diagnostic-core.h"
43 #include "alias.h"
44 #include "fold-const.h"
45 #include "fold-const-call.h"
46 #include "stor-layout.h"
47 #include "calls.h"
48 #include "varasm.h"
49 #include "tree-object-size.h"
50 #include "realmpfr.h"
51 #include "cfgrtl.h"
52 #include "except.h"
53 #include "dojump.h"
54 #include "explow.h"
55 #include "stmt.h"
56 #include "expr.h"
57 #include "libfuncs.h"
58 #include "output.h"
59 #include "typeclass.h"
60 #include "langhooks.h"
61 #include "value-prof.h"
62 #include "builtins.h"
63 #include "asan.h"
64 #include "cilk.h"
65 #include "tree-chkp.h"
66 #include "rtl-chkp.h"
67 #include "internal-fn.h"
68 #include "case-cfn-macros.h"
69 #include "gimple-fold.h"
70
71
72 struct target_builtins default_target_builtins;
73 #if SWITCHABLE_TARGET
74 struct target_builtins *this_target_builtins = &default_target_builtins;
75 #endif
76
77 /* Define the names of the builtin function types and codes. */
78 const char *const built_in_class_names[BUILT_IN_LAST]
79 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
80
81 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
82 const char * built_in_names[(int) END_BUILTINS] =
83 {
84 #include "builtins.def"
85 };
86
87 /* Set up an array of builtin_info_type, making sure each element's decl is
88 initialized to NULL_TREE. */
89 builtin_info_type builtin_info[(int)END_BUILTINS];
90
91 /* Non-zero if __builtin_constant_p should be folded right away. */
92 bool force_folding_builtin_constant_p;
93
94 static rtx c_readstr (const char *, machine_mode);
95 static int target_char_cast (tree, char *);
96 static rtx get_memory_rtx (tree, tree);
97 static int apply_args_size (void);
98 static int apply_result_size (void);
99 static rtx result_vector (int, rtx);
100 static void expand_builtin_prefetch (tree);
101 static rtx expand_builtin_apply_args (void);
102 static rtx expand_builtin_apply_args_1 (void);
103 static rtx expand_builtin_apply (rtx, rtx, rtx);
104 static void expand_builtin_return (rtx);
105 static enum type_class type_to_class (tree);
106 static rtx expand_builtin_classify_type (tree);
107 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
108 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
109 static rtx expand_builtin_interclass_mathfn (tree, rtx);
110 static rtx expand_builtin_sincos (tree);
111 static rtx expand_builtin_cexpi (tree, rtx);
112 static rtx expand_builtin_int_roundingfn (tree, rtx);
113 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
114 static rtx expand_builtin_next_arg (void);
115 static rtx expand_builtin_va_start (tree);
116 static rtx expand_builtin_va_end (tree);
117 static rtx expand_builtin_va_copy (tree);
118 static rtx expand_builtin_strcmp (tree, rtx);
119 static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
120 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
121 static rtx expand_builtin_memcpy (tree, rtx);
122 static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
123 static rtx expand_builtin_memcpy_args (tree, tree, tree, rtx, tree);
124 static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
125 static rtx expand_builtin_mempcpy_with_bounds (tree, rtx, machine_mode);
126 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
127 machine_mode, int, tree);
128 static rtx expand_builtin_strcpy (tree, rtx);
129 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
130 static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
131 static rtx expand_builtin_strncpy (tree, rtx);
132 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
133 static rtx expand_builtin_memset (tree, rtx, machine_mode);
134 static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
135 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
136 static rtx expand_builtin_bzero (tree);
137 static rtx expand_builtin_strlen (tree, rtx, machine_mode);
138 static rtx expand_builtin_alloca (tree, bool);
139 static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
140 static rtx expand_builtin_frame_address (tree, tree);
141 static tree stabilize_va_list_loc (location_t, tree, int);
142 static rtx expand_builtin_expect (tree, rtx);
143 static tree fold_builtin_constant_p (tree);
144 static tree fold_builtin_classify_type (tree);
145 static tree fold_builtin_strlen (location_t, tree, tree);
146 static tree fold_builtin_inf (location_t, tree, int);
147 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
148 static bool validate_arg (const_tree, enum tree_code code);
149 static rtx expand_builtin_fabs (tree, rtx, rtx);
150 static rtx expand_builtin_signbit (tree, rtx);
151 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
152 static tree fold_builtin_isascii (location_t, tree);
153 static tree fold_builtin_toascii (location_t, tree);
154 static tree fold_builtin_isdigit (location_t, tree);
155 static tree fold_builtin_fabs (location_t, tree, tree);
156 static tree fold_builtin_abs (location_t, tree, tree);
157 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
158 enum tree_code);
159 static tree fold_builtin_0 (location_t, tree);
160 static tree fold_builtin_1 (location_t, tree, tree);
161 static tree fold_builtin_2 (location_t, tree, tree, tree);
162 static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
163 static tree fold_builtin_varargs (location_t, tree, tree*, int);
164
165 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
166 static tree fold_builtin_strstr (location_t, tree, tree, tree);
167 static tree fold_builtin_strspn (location_t, tree, tree);
168 static tree fold_builtin_strcspn (location_t, tree, tree);
169
170 static rtx expand_builtin_object_size (tree);
171 static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
172 enum built_in_function);
173 static void maybe_emit_chk_warning (tree, enum built_in_function);
174 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
175 static void maybe_emit_free_warning (tree);
176 static tree fold_builtin_object_size (tree, tree);
177
178 unsigned HOST_WIDE_INT target_newline;
179 unsigned HOST_WIDE_INT target_percent;
180 static unsigned HOST_WIDE_INT target_c;
181 static unsigned HOST_WIDE_INT target_s;
182 char target_percent_c[3];
183 char target_percent_s[3];
184 char target_percent_s_newline[4];
185 static tree do_mpfr_remquo (tree, tree, tree);
186 static tree do_mpfr_lgamma_r (tree, tree, tree);
187 static void expand_builtin_sync_synchronize (void);
188
189 /* Return true if NAME starts with __builtin_, __sync_ or __atomic_ (or is a Cilk Plus entry point). */
190
191 static bool
192 is_builtin_name (const char *name)
193 {
194 if (strncmp (name, "__builtin_", 10) == 0)
195 return true;
196 if (strncmp (name, "__sync_", 7) == 0)
197 return true;
198 if (strncmp (name, "__atomic_", 9) == 0)
199 return true;
200 if (flag_cilkplus
201 && (!strcmp (name, "__cilkrts_detach")
202 || !strcmp (name, "__cilkrts_pop_frame")))
203 return true;
204 return false;
205 }
206
207
208 /* Return true if DECL is a function symbol representing a built-in. */
209
210 bool
211 is_builtin_fn (tree decl)
212 {
213 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
214 }
215
216 /* Return true if NODE should be considered for inline expansion regardless
217 of the optimization level. This is the case whenever a function is invoked under
218 its "internal" name, which normally contains the prefix "__builtin". */
219
220 bool
221 called_as_built_in (tree node)
222 {
223 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
224 we want the name used to call the function, not the name it
225 will have. */
226 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
227 return is_builtin_name (name);
228 }
229
230 /* Compute values M and N such that M divides (address of EXP - N) and such
231 that N < M. If these numbers can be determined, store M in *ALIGNP and N in
232 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
233 *ALIGNP and any bit-offset to *BITPOSP.
234
235 Note that the address (and thus the alignment) computed here is based
236 on the address to which a symbol resolves, whereas DECL_ALIGN is based
237 on the address at which an object is actually located. These two
238 addresses are not always the same. For example, on ARM targets,
239 the address &foo of a Thumb function foo() has the lowest bit set,
240 whereas foo() itself starts on an even address.
241
242 If ADDR_P is true we are taking the address of the memory reference EXP
243 and thus cannot rely on the access taking place. */
244
245 static bool
246 get_object_alignment_2 (tree exp, unsigned int *alignp,
247 unsigned HOST_WIDE_INT *bitposp, bool addr_p)
248 {
249 HOST_WIDE_INT bitsize, bitpos;
250 tree offset;
251 machine_mode mode;
252 int unsignedp, reversep, volatilep;
253 unsigned int align = BITS_PER_UNIT;
254 bool known_alignment = false;
255
256 /* Get the innermost object and the constant (bitpos) and possibly
257 variable (offset) offset of the access. */
258 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
259 &unsignedp, &reversep, &volatilep);
260
261 /* Extract alignment information from the innermost object and
262 possibly adjust bitpos and offset. */
263 if (TREE_CODE (exp) == FUNCTION_DECL)
264 {
265 /* Function addresses can encode extra information besides their
266 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
267 allows the low bit to be used as a virtual bit, we know
268 that the address itself must be at least 2-byte aligned. */
269 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
270 align = 2 * BITS_PER_UNIT;
271 }
272 else if (TREE_CODE (exp) == LABEL_DECL)
273 ;
274 else if (TREE_CODE (exp) == CONST_DECL)
275 {
276 /* The alignment of a CONST_DECL is determined by its initializer. */
277 exp = DECL_INITIAL (exp);
278 align = TYPE_ALIGN (TREE_TYPE (exp));
279 if (CONSTANT_CLASS_P (exp))
280 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
281
282 known_alignment = true;
283 }
284 else if (DECL_P (exp))
285 {
286 align = DECL_ALIGN (exp);
287 known_alignment = true;
288 }
289 else if (TREE_CODE (exp) == INDIRECT_REF
290 || TREE_CODE (exp) == MEM_REF
291 || TREE_CODE (exp) == TARGET_MEM_REF)
292 {
293 tree addr = TREE_OPERAND (exp, 0);
294 unsigned ptr_align;
295 unsigned HOST_WIDE_INT ptr_bitpos;
296 unsigned HOST_WIDE_INT ptr_bitmask = ~0;
297
298 /* If the address is explicitly aligned, handle that. */
299 if (TREE_CODE (addr) == BIT_AND_EXPR
300 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
301 {
302 ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
303 ptr_bitmask *= BITS_PER_UNIT;
304 align = least_bit_hwi (ptr_bitmask);
305 addr = TREE_OPERAND (addr, 0);
306 }
307
308 known_alignment
309 = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
310 align = MAX (ptr_align, align);
311
312 /* Re-apply explicit alignment to the bitpos. */
313 ptr_bitpos &= ptr_bitmask;
314
315 /* The alignment of the pointer operand in a TARGET_MEM_REF
316 has to take the variable offset parts into account. */
317 if (TREE_CODE (exp) == TARGET_MEM_REF)
318 {
319 if (TMR_INDEX (exp))
320 {
321 unsigned HOST_WIDE_INT step = 1;
322 if (TMR_STEP (exp))
323 step = TREE_INT_CST_LOW (TMR_STEP (exp));
324 align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
325 }
326 if (TMR_INDEX2 (exp))
327 align = BITS_PER_UNIT;
328 known_alignment = false;
329 }
330
331 /* When EXP is an actual memory reference then we can use
332 TYPE_ALIGN of a pointer indirection to derive alignment.
333 Do so only if get_pointer_alignment_1 did not reveal absolute
334 alignment knowledge and if using that alignment would
335 improve the situation. */
336 if (!addr_p && !known_alignment
337 && TYPE_ALIGN (TREE_TYPE (exp)) > align)
338 align = TYPE_ALIGN (TREE_TYPE (exp));
339 else
340 {
341 /* Else adjust bitpos accordingly. */
342 bitpos += ptr_bitpos;
343 if (TREE_CODE (exp) == MEM_REF
344 || TREE_CODE (exp) == TARGET_MEM_REF)
345 bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
346 }
347 }
348 else if (TREE_CODE (exp) == STRING_CST)
349 {
350 /* STRING_CSTs are the only constant objects we allow not to be
351 wrapped inside a CONST_DECL. */
352 align = TYPE_ALIGN (TREE_TYPE (exp));
353 if (CONSTANT_CLASS_P (exp))
354 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
355
356 known_alignment = true;
357 }
358
359 /* If there is a non-constant offset part extract the maximum
360 alignment that can prevail. */
361 if (offset)
362 {
363 unsigned int trailing_zeros = tree_ctz (offset);
364 if (trailing_zeros < HOST_BITS_PER_INT)
365 {
366 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
367 if (inner)
368 align = MIN (align, inner);
369 }
370 }
371
372 *alignp = align;
373 *bitposp = bitpos & (*alignp - 1);
374 return known_alignment;
375 }
376
377 /* For a memory reference expression EXP compute values M and N such that M
378 divides (&EXP - N) and such that N < M. If these numbers can be determined,
379 store M in *ALIGNP and N in *BITPOSP and return true. Otherwise return false
380 and store BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP. */
381
382 bool
383 get_object_alignment_1 (tree exp, unsigned int *alignp,
384 unsigned HOST_WIDE_INT *bitposp)
385 {
386 return get_object_alignment_2 (exp, alignp, bitposp, false);
387 }
388
389 /* Return the alignment in bits of EXP, an object. */
390
391 unsigned int
392 get_object_alignment (tree exp)
393 {
394 unsigned HOST_WIDE_INT bitpos = 0;
395 unsigned int align;
396
397 get_object_alignment_1 (exp, &align, &bitpos);
398
399 /* align and bitpos now specify known low bits of the pointer.
400 ptr & (align - 1) == bitpos. */
401
402 if (bitpos != 0)
403 align = least_bit_hwi (bitpos);
404 return align;
405 }
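/* As a worked example of the relation above: if get_object_alignment_1
   reports align == 128 and bitpos == 32, the object sits 32 bits past a
   128-bit boundary, so the strongest alignment that can be guaranteed is
   least_bit_hwi (32) == 32 bits, i.e. 4 bytes.  */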
406
407 /* For a pointer valued expression EXP compute values M and N such that M
408 divides (EXP - N) and such that N < M. If these numbers can be determined,
409 store M in *ALIGNP and N in *BITPOSP and return true. Return false if
410 the results are just a conservative approximation.
411
412 If EXP is not a pointer, false is returned too. */
413
414 bool
415 get_pointer_alignment_1 (tree exp, unsigned int *alignp,
416 unsigned HOST_WIDE_INT *bitposp)
417 {
418 STRIP_NOPS (exp);
419
420 if (TREE_CODE (exp) == ADDR_EXPR)
421 return get_object_alignment_2 (TREE_OPERAND (exp, 0),
422 alignp, bitposp, true);
423 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
424 {
425 unsigned int align;
426 unsigned HOST_WIDE_INT bitpos;
427 bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
428 &align, &bitpos);
429 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
430 bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
431 else
432 {
433 unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
434 if (trailing_zeros < HOST_BITS_PER_INT)
435 {
436 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
437 if (inner)
438 align = MIN (align, inner);
439 }
440 }
441 *alignp = align;
442 *bitposp = bitpos & (align - 1);
443 return res;
444 }
445 else if (TREE_CODE (exp) == SSA_NAME
446 && POINTER_TYPE_P (TREE_TYPE (exp)))
447 {
448 unsigned int ptr_align, ptr_misalign;
449 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
450
451 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
452 {
453 *bitposp = ptr_misalign * BITS_PER_UNIT;
454 *alignp = ptr_align * BITS_PER_UNIT;
455 /* Make sure to return a sensible alignment when the multiplication
456 by BITS_PER_UNIT overflowed. */
457 if (*alignp == 0)
458 *alignp = 1u << (HOST_BITS_PER_INT - 1);
459 /* We cannot really tell whether this result is an approximation. */
460 return false;
461 }
462 else
463 {
464 *bitposp = 0;
465 *alignp = BITS_PER_UNIT;
466 return false;
467 }
468 }
469 else if (TREE_CODE (exp) == INTEGER_CST)
470 {
471 *alignp = BIGGEST_ALIGNMENT;
472 *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
473 & (BIGGEST_ALIGNMENT - 1));
474 return true;
475 }
476
477 *bitposp = 0;
478 *alignp = BITS_PER_UNIT;
479 return false;
480 }
481
482 /* Return the alignment in bits of EXP, a pointer valued expression.
483 The alignment returned is, by default, the alignment of the thing that
484 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
485
486 Otherwise, look at the expression to see if we can do better, i.e., if the
487 expression is actually pointing at an object whose alignment is tighter. */
488
489 unsigned int
490 get_pointer_alignment (tree exp)
491 {
492 unsigned HOST_WIDE_INT bitpos = 0;
493 unsigned int align;
494
495 get_pointer_alignment_1 (exp, &align, &bitpos);
496
497 /* align and bitpos now specify known low bits of the pointer.
498 ptr & (align - 1) == bitpos. */
499
500 if (bitpos != 0)
501 align = least_bit_hwi (bitpos);
502
503 return align;
504 }
505
506 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
507 way, because the string could contain a zero byte in the middle.
508 TREE_STRING_LENGTH is the size of the character array, not the string.
509
510 ONLY_VALUE should be nonzero if the result is not going to be emitted
511 into the instruction stream and zero if it is going to be expanded.
512 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
513 is returned, otherwise NULL, since
514 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
515 evaluate the side-effects.
516
517 If ONLY_VALUE is two then we do not emit warnings about out-of-bound
518 accesses. Note that this implies the result is not going to be emitted
519 into the instruction stream.
520
521 The value returned is of type `ssizetype'.
522
523 Unfortunately, string_constant can't access the values of const char
524 arrays with initializers, so neither can we do so here. */
525
526 tree
527 c_strlen (tree src, int only_value)
528 {
529 tree offset_node;
530 HOST_WIDE_INT offset;
531 int max;
532 const char *ptr;
533 location_t loc;
534
535 STRIP_NOPS (src);
536 if (TREE_CODE (src) == COND_EXPR
537 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
538 {
539 tree len1, len2;
540
541 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
542 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
543 if (tree_int_cst_equal (len1, len2))
544 return len1;
545 }
546
547 if (TREE_CODE (src) == COMPOUND_EXPR
548 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
549 return c_strlen (TREE_OPERAND (src, 1), only_value);
550
551 loc = EXPR_LOC_OR_LOC (src, input_location);
552
553 src = string_constant (src, &offset_node);
554 if (src == 0)
555 return NULL_TREE;
556
557 max = TREE_STRING_LENGTH (src) - 1;
558 ptr = TREE_STRING_POINTER (src);
559
560 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
561 {
562 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
563 compute the offset to the following null if we don't know where to
564 start searching for it. */
565 int i;
566
567 for (i = 0; i < max; i++)
568 if (ptr[i] == 0)
569 return NULL_TREE;
570
571 /* We don't know the starting offset, but we do know that the string
572 has no internal zero bytes. We can assume that the offset falls
573 within the bounds of the string; otherwise, the programmer deserves
574 what he gets. Subtract the offset from the length of the string,
575 and return that. This would perhaps not be valid if we were dealing
576 with named arrays in addition to literal string constants. */
577
578 return size_diffop_loc (loc, size_int (max), offset_node);
579 }
580
581 /* We have a known offset into the string. Start searching there for
582 a null character if we can represent it as a single HOST_WIDE_INT. */
583 if (offset_node == 0)
584 offset = 0;
585 else if (! tree_fits_shwi_p (offset_node))
586 offset = -1;
587 else
588 offset = tree_to_shwi (offset_node);
589
590 /* If the offset is known to be out of bounds, warn, and call strlen at
591 runtime. */
592 if (offset < 0 || offset > max)
593 {
594 /* Suppress multiple warnings for propagated constant strings. */
595 if (only_value != 2
596 && !TREE_NO_WARNING (src))
597 {
598 warning_at (loc, 0, "offset outside bounds of constant string");
599 TREE_NO_WARNING (src) = 1;
600 }
601 return NULL_TREE;
602 }
603
604 /* Use strlen to search for the first zero byte. Since any strings
605 constructed with build_string will have nulls appended, we win even
606 if we get handed something like (char[4])"abcd".
607
608 Since OFFSET is our starting index into the string, no further
609 calculation is needed. */
610 return ssize_int (strlen (ptr + offset));
611 }
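/* For illustration: c_strlen applied to the STRING_CST "hello" yields
   ssize_int (5), and applied to &"foobar"[2] (a constant offset of 2)
   yields 4.  If the offset is not constant but the string contains no
   embedded zero bytes, the string length minus the offset is returned
   instead.  */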
612
613 /* Return a constant integer corresponding to target reading
614 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
615
616 static rtx
617 c_readstr (const char *str, machine_mode mode)
618 {
619 HOST_WIDE_INT ch;
620 unsigned int i, j;
621 HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];
622
623 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
624 unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
625 / HOST_BITS_PER_WIDE_INT;
626
627 gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
628 for (i = 0; i < len; i++)
629 tmp[i] = 0;
630
631 ch = 1;
632 for (i = 0; i < GET_MODE_SIZE (mode); i++)
633 {
634 j = i;
635 if (WORDS_BIG_ENDIAN)
636 j = GET_MODE_SIZE (mode) - i - 1;
637 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
638 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
639 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
640 j *= BITS_PER_UNIT;
641
642 if (ch)
643 ch = (unsigned char) str[i];
644 tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
645 }
646
647 wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
648 return immed_wide_int_const (c, mode);
649 }
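/* For example, on a typical little-endian target c_readstr ("ab", HImode)
   yields the constant 0x6261: byte 0 ('a' == 0x61) occupies bits 0-7 and
   byte 1 ('b' == 0x62) bits 8-15.  Once a NUL byte is reached, all
   remaining bytes of the constant are zero.  */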
650
651 /* Cast a target constant CST to target CHAR and if that value fits into
652 host char type, return zero and put that value into variable pointed to by
653 P. */
654
655 static int
656 target_char_cast (tree cst, char *p)
657 {
658 unsigned HOST_WIDE_INT val, hostval;
659
660 if (TREE_CODE (cst) != INTEGER_CST
661 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
662 return 1;
663
664 /* Do not care if it fits or not right here. */
665 val = TREE_INT_CST_LOW (cst);
666
667 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
668 val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;
669
670 hostval = val;
671 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
672 hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;
673
674 if (val != hostval)
675 return 1;
676
677 *p = hostval;
678 return 0;
679 }
680
681 /* Similar to save_expr, but assumes that arbitrary code is not executed
682 in between the multiple evaluations. In particular, we assume that a
683 non-addressable local variable will not be modified. */
684
685 static tree
686 builtin_save_expr (tree exp)
687 {
688 if (TREE_CODE (exp) == SSA_NAME
689 || (TREE_ADDRESSABLE (exp) == 0
690 && (TREE_CODE (exp) == PARM_DECL
691 || (VAR_P (exp) && !TREE_STATIC (exp)))))
692 return exp;
693
694 return save_expr (exp);
695 }
696
697 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
698 times to get the address of either a higher stack frame, or a return
699 address located within it (depending on FNDECL_CODE). */
700
701 static rtx
702 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
703 {
704 int i;
705 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
706 if (tem == NULL_RTX)
707 {
708 /* For a zero count with __builtin_return_address, we don't care what
709 frame address we return, because target-specific definitions will
710 override us. Therefore frame pointer elimination is OK, and using
711 the soft frame pointer is OK.
712
713 For a nonzero count, or a zero count with __builtin_frame_address,
714 we require a stable offset from the current frame pointer to the
715 previous one, so we must use the hard frame pointer, and
716 we must disable frame pointer elimination. */
717 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
718 tem = frame_pointer_rtx;
719 else
720 {
721 tem = hard_frame_pointer_rtx;
722
723 /* Tell reload not to eliminate the frame pointer. */
724 crtl->accesses_prior_frames = 1;
725 }
726 }
727
728 if (count > 0)
729 SETUP_FRAME_ADDRESSES ();
730
731 /* On the SPARC, the return address is not in the frame, it is in a
732 register. There is no way to access it off of the current frame
733 pointer, but it can be accessed off the previous frame pointer by
734 reading the value from the register window save area. */
735 if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
736 count--;
737
738 /* Scan back COUNT frames to the specified frame. */
739 for (i = 0; i < count; i++)
740 {
741 /* Assume the dynamic chain pointer is in the word that the
742 frame address points to, unless otherwise specified. */
743 tem = DYNAMIC_CHAIN_ADDRESS (tem);
744 tem = memory_address (Pmode, tem);
745 tem = gen_frame_mem (Pmode, tem);
746 tem = copy_to_reg (tem);
747 }
748
749 /* For __builtin_frame_address, return what we've got. But, on
750 the SPARC for example, we may have to add a bias. */
751 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
752 return FRAME_ADDR_RTX (tem);
753
754 /* For __builtin_return_address, get the return address from that frame. */
755 #ifdef RETURN_ADDR_RTX
756 tem = RETURN_ADDR_RTX (count, tem);
757 #else
758 tem = memory_address (Pmode,
759 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
760 tem = gen_frame_mem (Pmode, tem);
761 #endif
762 return tem;
763 }
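/* This is the expander behind source-level calls such as
   __builtin_return_address (0) and __builtin_frame_address (1):
   COUNT is the (constant) argument and FNDECL_CODE selects which of
   the two addresses is wanted.  */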
764
765 /* Alias set used for setjmp buffer. */
766 static alias_set_type setjmp_alias_set = -1;
767
768 /* Construct the leading half of a __builtin_setjmp call. Control will
769 return to RECEIVER_LABEL. This is also called directly by the SJLJ
770 exception handling code. */
771
772 void
773 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
774 {
775 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
776 rtx stack_save;
777 rtx mem;
778
779 if (setjmp_alias_set == -1)
780 setjmp_alias_set = new_alias_set ();
781
782 buf_addr = convert_memory_address (Pmode, buf_addr);
783
784 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
785
786 /* We store the frame pointer and the address of receiver_label in
787 the buffer and use the rest of it for the stack save area, which
788 is machine-dependent. */
789
790 mem = gen_rtx_MEM (Pmode, buf_addr);
791 set_mem_alias_set (mem, setjmp_alias_set);
792 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
793
794 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
795 GET_MODE_SIZE (Pmode))),
796 set_mem_alias_set (mem, setjmp_alias_set);
797
798 emit_move_insn (validize_mem (mem),
799 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
800
801 stack_save = gen_rtx_MEM (sa_mode,
802 plus_constant (Pmode, buf_addr,
803 2 * GET_MODE_SIZE (Pmode)));
804 set_mem_alias_set (stack_save, setjmp_alias_set);
805 emit_stack_save (SAVE_NONLOCAL, &stack_save);
806
807 /* If there is further processing to do, do it. */
808 if (targetm.have_builtin_setjmp_setup ())
809 emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));
810
811 /* We have a nonlocal label. */
812 cfun->has_nonlocal_label = 1;
813 }
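/* The resulting buffer layout is: the first Pmode-sized slot holds the
   value of targetm.builtin_setjmp_frame_value, the second holds the
   address of RECEIVER_LABEL, and the remainder (starting at offset
   2 * GET_MODE_SIZE (Pmode)) holds the SAVE_NONLOCAL stack save area.
   A typical source-level pairing, shown only as an illustration, is

     if (__builtin_setjmp (buf) == 0)
       ...                             normal path
     else
       ...                             reached via __builtin_longjmp (buf, 1)

   though, as noted elsewhere, these builtins are meant for internal
   exception handling use only.  */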
814
815 /* Construct the trailing part of a __builtin_setjmp call. This is
816 also called directly by the SJLJ exception handling code.
817 If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler. */
818
819 void
820 expand_builtin_setjmp_receiver (rtx receiver_label)
821 {
822 rtx chain;
823
824 /* Mark the FP as used when we get here, so we have to make sure it's
825 marked as used by this function. */
826 emit_use (hard_frame_pointer_rtx);
827
828 /* Mark the static chain as clobbered here so life information
829 doesn't get messed up for it. */
830 chain = targetm.calls.static_chain (current_function_decl, true);
831 if (chain && REG_P (chain))
832 emit_clobber (chain);
833
834 /* Now put in the code to restore the frame pointer, and argument
835 pointer, if needed. */
836 if (! targetm.have_nonlocal_goto ())
837 {
838 /* First adjust our frame pointer to its actual value. It was
839 previously set to the start of the virtual area corresponding to
840 the stacked variables when we branched here and now needs to be
841 adjusted to the actual hardware fp value.
842
843 Assignments to virtual registers are converted by
844 instantiate_virtual_regs into the corresponding assignment
845 to the underlying register (fp in this case) that makes
846 the original assignment true.
847 So the following insn will actually be decrementing fp by
848 STARTING_FRAME_OFFSET. */
849 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
850
851 /* Restoring the frame pointer also modifies the hard frame pointer.
852 Mark it used (so that the previous assignment remains live once
853 the frame pointer is eliminated) and clobbered (to represent the
854 implicit update from the assignment). */
855 emit_use (hard_frame_pointer_rtx);
856 emit_clobber (hard_frame_pointer_rtx);
857 }
858
859 if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
860 {
861 /* If the argument pointer can be eliminated in favor of the
862 frame pointer, we don't need to restore it. We assume here
863 that if such an elimination is present, it can always be used.
864 This is the case on all known machines; if we don't make this
865 assumption, we do unnecessary saving on many machines. */
866 size_t i;
867 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
868
869 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
870 if (elim_regs[i].from == ARG_POINTER_REGNUM
871 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
872 break;
873
874 if (i == ARRAY_SIZE (elim_regs))
875 {
876 /* Now restore our arg pointer from the address at which it
877 was saved in our stack frame. */
878 emit_move_insn (crtl->args.internal_arg_pointer,
879 copy_to_reg (get_arg_pointer_save_area ()));
880 }
881 }
882
883 if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
884 emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
885 else if (targetm.have_nonlocal_goto_receiver ())
886 emit_insn (targetm.gen_nonlocal_goto_receiver ());
887 else
888 { /* Nothing */ }
889
890 /* We must not allow the code we just generated to be reordered by
891 scheduling. Specifically, the update of the frame pointer must
892 happen immediately, not later. */
893 emit_insn (gen_blockage ());
894 }
895
896 /* __builtin_longjmp is passed a pointer to an array of five words (not
897 all will be used on all machines). It operates similarly to the C
898 library function of the same name, but is more efficient. Much of
899 the code below is copied from the handling of non-local gotos. */
900
901 static void
902 expand_builtin_longjmp (rtx buf_addr, rtx value)
903 {
904 rtx fp, lab, stack;
905 rtx_insn *insn, *last;
906 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
907
908 /* DRAP is needed for stack realign if longjmp is expanded to current
909 function */
910 if (SUPPORTS_STACK_ALIGNMENT)
911 crtl->need_drap = true;
912
913 if (setjmp_alias_set == -1)
914 setjmp_alias_set = new_alias_set ();
915
916 buf_addr = convert_memory_address (Pmode, buf_addr);
917
918 buf_addr = force_reg (Pmode, buf_addr);
919
920 /* We require the user to pass a second argument of 1, because
921 that is what builtin_setjmp will return. */
922 gcc_assert (value == const1_rtx);
923
924 last = get_last_insn ();
925 if (targetm.have_builtin_longjmp ())
926 emit_insn (targetm.gen_builtin_longjmp (buf_addr));
927 else
928 {
929 fp = gen_rtx_MEM (Pmode, buf_addr);
930 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
931 GET_MODE_SIZE (Pmode)));
932
933 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
934 2 * GET_MODE_SIZE (Pmode)));
935 set_mem_alias_set (fp, setjmp_alias_set);
936 set_mem_alias_set (lab, setjmp_alias_set);
937 set_mem_alias_set (stack, setjmp_alias_set);
938
939 /* Pick up FP, label, and SP from the block and jump. This code is
940 from expand_goto in stmt.c; see there for detailed comments. */
941 if (targetm.have_nonlocal_goto ())
942 /* We have to pass a value to the nonlocal_goto pattern that will
943 get copied into the static_chain pointer, but it does not matter
944 what that value is, because builtin_setjmp does not use it. */
945 emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
946 else
947 {
948 lab = copy_to_reg (lab);
949
950 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
951 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
952
953 emit_move_insn (hard_frame_pointer_rtx, fp);
954 emit_stack_restore (SAVE_NONLOCAL, stack);
955
956 emit_use (hard_frame_pointer_rtx);
957 emit_use (stack_pointer_rtx);
958 emit_indirect_jump (lab);
959 }
960 }
961
962 /* Search backwards and mark the jump insn as a non-local goto.
963 Note that this precludes the use of __builtin_longjmp to a
964 __builtin_setjmp target in the same function. However, we've
965 already cautioned the user that these functions are for
966 internal exception handling use only. */
967 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
968 {
969 gcc_assert (insn != last);
970
971 if (JUMP_P (insn))
972 {
973 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
974 break;
975 }
976 else if (CALL_P (insn))
977 break;
978 }
979 }
980
981 static inline bool
982 more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
983 {
984 return (iter->i < iter->n);
985 }
986
987 /* This function validates the types of a function call argument list
988 against a specified list of tree_codes. If the last specifier is a 0,
989 that represents an ellipses, otherwise the last specifier must be a
990 VOID_TYPE. */
991
992 static bool
993 validate_arglist (const_tree callexpr, ...)
994 {
995 enum tree_code code;
996 bool res = 0;
997 va_list ap;
998 const_call_expr_arg_iterator iter;
999 const_tree arg;
1000
1001 va_start (ap, callexpr);
1002 init_const_call_expr_arg_iterator (callexpr, &iter);
1003
1004 do
1005 {
1006 code = (enum tree_code) va_arg (ap, int);
1007 switch (code)
1008 {
1009 case 0:
1010 /* This signifies an ellipsis; any further arguments are all ok. */
1011 res = true;
1012 goto end;
1013 case VOID_TYPE:
1014 /* This signifies an endlink; if no arguments remain, return
1015 true, otherwise return false. */
1016 res = !more_const_call_expr_args_p (&iter);
1017 goto end;
1018 default:
1019 /* If no parameters remain or the parameter's code does not
1020 match the specified code, return false. Otherwise continue
1021 checking any remaining arguments. */
1022 arg = next_const_call_expr_arg (&iter);
1023 if (!validate_arg (arg, code))
1024 goto end;
1025 break;
1026 }
1027 }
1028 while (1);
1029
1030 /* We need gotos here since we can only have one VA_CLOSE in a
1031 function. */
1032 end: ;
1033 va_end (ap);
1034
1035 return res;
1036 }
1037
1038 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1039 and the address of the save area. */
1040
1041 static rtx
1042 expand_builtin_nonlocal_goto (tree exp)
1043 {
1044 tree t_label, t_save_area;
1045 rtx r_label, r_save_area, r_fp, r_sp;
1046 rtx_insn *insn;
1047
1048 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1049 return NULL_RTX;
1050
1051 t_label = CALL_EXPR_ARG (exp, 0);
1052 t_save_area = CALL_EXPR_ARG (exp, 1);
1053
1054 r_label = expand_normal (t_label);
1055 r_label = convert_memory_address (Pmode, r_label);
1056 r_save_area = expand_normal (t_save_area);
1057 r_save_area = convert_memory_address (Pmode, r_save_area);
1058 /* Copy the address of the save location to a register just in case it was
1059 based on the frame pointer. */
1060 r_save_area = copy_to_reg (r_save_area);
1061 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1062 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1063 plus_constant (Pmode, r_save_area,
1064 GET_MODE_SIZE (Pmode)));
1065
1066 crtl->has_nonlocal_goto = 1;
1067
1068 /* ??? We no longer need to pass the static chain value, afaik. */
1069 if (targetm.have_nonlocal_goto ())
1070 emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1071 else
1072 {
1073 r_label = copy_to_reg (r_label);
1074
1075 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1076 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1077
1078 /* Restore frame pointer for containing function. */
1079 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1080 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1081
1082 /* USE of hard_frame_pointer_rtx added for consistency;
1083 not clear if really needed. */
1084 emit_use (hard_frame_pointer_rtx);
1085 emit_use (stack_pointer_rtx);
1086
1087 /* If the architecture is using a GP register, we must
1088 conservatively assume that the target function makes use of it.
1089 The prologue of functions with nonlocal gotos must therefore
1090 initialize the GP register to the appropriate value, and we
1091 must then make sure that this value is live at the point
1092 of the jump. (Note that this doesn't necessarily apply
1093 to targets with a nonlocal_goto pattern; they are free
1094 to implement it in their own way. Note also that this is
1095 a no-op if the GP register is a global invariant.) */
1096 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
1097 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
1098 emit_use (pic_offset_table_rtx);
1099
1100 emit_indirect_jump (r_label);
1101 }
1102
1103 /* Search backwards to the jump insn and mark it as a
1104 non-local goto. */
1105 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1106 {
1107 if (JUMP_P (insn))
1108 {
1109 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1110 break;
1111 }
1112 else if (CALL_P (insn))
1113 break;
1114 }
1115
1116 return const0_rtx;
1117 }
1118
1119 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1120 (not all will be used on all machines) that was passed to __builtin_setjmp.
1121 It updates the stack pointer in that block to the current value. This is
1122 also called directly by the SJLJ exception handling code. */
1123
1124 void
1125 expand_builtin_update_setjmp_buf (rtx buf_addr)
1126 {
1127 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1128 rtx stack_save
1129 = gen_rtx_MEM (sa_mode,
1130 memory_address
1131 (sa_mode,
1132 plus_constant (Pmode, buf_addr,
1133 2 * GET_MODE_SIZE (Pmode))));
1134
1135 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1136 }
1137
1138 /* Expand a call to __builtin_prefetch. For a target that does not support
1139 data prefetch, evaluate the memory address argument in case it has side
1140 effects. */
1141
1142 static void
1143 expand_builtin_prefetch (tree exp)
1144 {
1145 tree arg0, arg1, arg2;
1146 int nargs;
1147 rtx op0, op1, op2;
1148
1149 if (!validate_arglist (exp, POINTER_TYPE, 0))
1150 return;
1151
1152 arg0 = CALL_EXPR_ARG (exp, 0);
1153
1154 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1155 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1156 locality). */
1157 nargs = call_expr_nargs (exp);
1158 if (nargs > 1)
1159 arg1 = CALL_EXPR_ARG (exp, 1);
1160 else
1161 arg1 = integer_zero_node;
1162 if (nargs > 2)
1163 arg2 = CALL_EXPR_ARG (exp, 2);
1164 else
1165 arg2 = integer_three_node;
1166
1167 /* Argument 0 is an address. */
1168 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1169
1170 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1171 if (TREE_CODE (arg1) != INTEGER_CST)
1172 {
1173 error ("second argument to %<__builtin_prefetch%> must be a constant");
1174 arg1 = integer_zero_node;
1175 }
1176 op1 = expand_normal (arg1);
1177 /* Argument 1 must be either zero or one. */
1178 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1179 {
1180 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1181 " using zero");
1182 op1 = const0_rtx;
1183 }
1184
1185 /* Argument 2 (locality) must be a compile-time constant int. */
1186 if (TREE_CODE (arg2) != INTEGER_CST)
1187 {
1188 error ("third argument to %<__builtin_prefetch%> must be a constant");
1189 arg2 = integer_zero_node;
1190 }
1191 op2 = expand_normal (arg2);
1192 /* Argument 2 must be 0, 1, 2, or 3. */
1193 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1194 {
1195 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1196 op2 = const0_rtx;
1197 }
1198
1199 if (targetm.have_prefetch ())
1200 {
1201 struct expand_operand ops[3];
1202
1203 create_address_operand (&ops[0], op0);
1204 create_integer_operand (&ops[1], INTVAL (op1));
1205 create_integer_operand (&ops[2], INTVAL (op2));
1206 if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
1207 return;
1208 }
1209
1210 /* Don't do anything with direct references to volatile memory, but
1211 generate code to handle other side effects. */
1212 if (!MEM_P (op0) && side_effects_p (op0))
1213 emit_insn (op0);
1214 }
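/* For example, __builtin_prefetch (p, 0, 3) requests a read prefetch of
   *p with maximal temporal locality; __builtin_prefetch (p) is
   equivalent, since the read/write flag defaults to 0 and the locality
   argument defaults to 3, matching the defaults applied above.  */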
1215
1216 /* Get a MEM rtx for expression EXP which is the address of an operand
1217 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1218 the maximum length of the block of memory that might be accessed or
1219 NULL if unknown. */
1220
1221 static rtx
1222 get_memory_rtx (tree exp, tree len)
1223 {
1224 tree orig_exp = exp;
1225 rtx addr, mem;
1226
1227 /* When EXP is not a resolved SAVE_EXPR, MEM_ATTRS can still be derived
1228 from its expression; for expr->a.b only <variable>.a.b is recorded. */
1229 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1230 exp = TREE_OPERAND (exp, 0);
1231
1232 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1233 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1234
1235 /* Get an expression we can use to find the attributes to assign to MEM.
1236 First remove any nops. */
1237 while (CONVERT_EXPR_P (exp)
1238 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1239 exp = TREE_OPERAND (exp, 0);
1240
1241 /* Build a MEM_REF representing the whole accessed area as a byte blob
1242 (as builtin stringops may alias with anything). */
1243 exp = fold_build2 (MEM_REF,
1244 build_array_type (char_type_node,
1245 build_range_type (sizetype,
1246 size_one_node, len)),
1247 exp, build_int_cst (ptr_type_node, 0));
1248
1249 /* If the MEM_REF has no acceptable address, try to get the base object
1250 from the original address we got, and build an all-aliasing
1251 unknown-sized access to that one. */
1252 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1253 set_mem_attributes (mem, exp, 0);
1254 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1255 && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1256 0))))
1257 {
1258 exp = build_fold_addr_expr (exp);
1259 exp = fold_build2 (MEM_REF,
1260 build_array_type (char_type_node,
1261 build_range_type (sizetype,
1262 size_zero_node,
1263 NULL)),
1264 exp, build_int_cst (ptr_type_node, 0));
1265 set_mem_attributes (mem, exp, 0);
1266 }
1267 set_mem_alias_set (mem, 0);
1268 return mem;
1269 }
1270 \f
1271 /* Built-in functions to perform an untyped call and return. */
1272
1273 #define apply_args_mode \
1274 (this_target_builtins->x_apply_args_mode)
1275 #define apply_result_mode \
1276 (this_target_builtins->x_apply_result_mode)
1277
1278 /* Return the size required for the block returned by __builtin_apply_args,
1279 and initialize apply_args_mode. */
1280
1281 static int
1282 apply_args_size (void)
1283 {
1284 static int size = -1;
1285 int align;
1286 unsigned int regno;
1287 machine_mode mode;
1288
1289 /* The values computed by this function never change. */
1290 if (size < 0)
1291 {
1292 /* The first value is the incoming arg-pointer. */
1293 size = GET_MODE_SIZE (Pmode);
1294
1295 /* The second value is the structure value address unless this is
1296 passed as an "invisible" first argument. */
1297 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1298 size += GET_MODE_SIZE (Pmode);
1299
1300 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1301 if (FUNCTION_ARG_REGNO_P (regno))
1302 {
1303 mode = targetm.calls.get_raw_arg_mode (regno);
1304
1305 gcc_assert (mode != VOIDmode);
1306
1307 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1308 if (size % align != 0)
1309 size = CEIL (size, align) * align;
1310 size += GET_MODE_SIZE (mode);
1311 apply_args_mode[regno] = mode;
1312 }
1313 else
1314 {
1315 apply_args_mode[regno] = VOIDmode;
1316 }
1317 }
1318 return size;
1319 }
1320
1321 /* Return the size required for the block returned by __builtin_apply,
1322 and initialize apply_result_mode. */
1323
1324 static int
1325 apply_result_size (void)
1326 {
1327 static int size = -1;
1328 int align, regno;
1329 machine_mode mode;
1330
1331 /* The values computed by this function never change. */
1332 if (size < 0)
1333 {
1334 size = 0;
1335
1336 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1337 if (targetm.calls.function_value_regno_p (regno))
1338 {
1339 mode = targetm.calls.get_raw_result_mode (regno);
1340
1341 gcc_assert (mode != VOIDmode);
1342
1343 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1344 if (size % align != 0)
1345 size = CEIL (size, align) * align;
1346 size += GET_MODE_SIZE (mode);
1347 apply_result_mode[regno] = mode;
1348 }
1349 else
1350 apply_result_mode[regno] = VOIDmode;
1351
1352 /* Allow targets that use untyped_call and untyped_return to override
1353 the size so that machine-specific information can be stored here. */
1354 #ifdef APPLY_RESULT_SIZE
1355 size = APPLY_RESULT_SIZE;
1356 #endif
1357 }
1358 return size;
1359 }
1360
1361 /* Create a vector describing the result block RESULT. If SAVEP is true,
1362 the result block is used to save the values; otherwise it is used to
1363 restore the values. */
1364
1365 static rtx
1366 result_vector (int savep, rtx result)
1367 {
1368 int regno, size, align, nelts;
1369 machine_mode mode;
1370 rtx reg, mem;
1371 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1372
1373 size = nelts = 0;
1374 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1375 if ((mode = apply_result_mode[regno]) != VOIDmode)
1376 {
1377 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1378 if (size % align != 0)
1379 size = CEIL (size, align) * align;
1380 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1381 mem = adjust_address (result, mode, size);
1382 savevec[nelts++] = (savep
1383 ? gen_rtx_SET (mem, reg)
1384 : gen_rtx_SET (reg, mem));
1385 size += GET_MODE_SIZE (mode);
1386 }
1387 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1388 }
1389
1390 /* Save the state required to perform an untyped call with the same
1391 arguments as were passed to the current function. */
1392
1393 static rtx
1394 expand_builtin_apply_args_1 (void)
1395 {
1396 rtx registers, tem;
1397 int size, align, regno;
1398 machine_mode mode;
1399 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1400
1401 /* Create a block where the arg-pointer, structure value address,
1402 and argument registers can be saved. */
1403 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1404
1405 /* Walk past the arg-pointer and structure value address. */
1406 size = GET_MODE_SIZE (Pmode);
1407 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1408 size += GET_MODE_SIZE (Pmode);
1409
1410 /* Save each register used in calling a function to the block. */
1411 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1412 if ((mode = apply_args_mode[regno]) != VOIDmode)
1413 {
1414 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1415 if (size % align != 0)
1416 size = CEIL (size, align) * align;
1417
1418 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1419
1420 emit_move_insn (adjust_address (registers, mode, size), tem);
1421 size += GET_MODE_SIZE (mode);
1422 }
1423
1424 /* Save the arg pointer to the block. */
1425 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1426 /* We need the arg pointer as the caller actually passed the arguments, not
1427 as we might have pretended they were passed. Make sure it's a valid
1428 operand, as emit_move_insn isn't expected to handle a PLUS. */
1429 if (STACK_GROWS_DOWNWARD)
1430 tem
1431 = force_operand (plus_constant (Pmode, tem,
1432 crtl->args.pretend_args_size),
1433 NULL_RTX);
1434 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1435
1436 size = GET_MODE_SIZE (Pmode);
1437
1438 /* Save the structure value address unless this is passed as an
1439 "invisible" first argument. */
1440 if (struct_incoming_value)
1441 {
1442 emit_move_insn (adjust_address (registers, Pmode, size),
1443 copy_to_reg (struct_incoming_value));
1444 size += GET_MODE_SIZE (Pmode);
1445 }
1446
1447 /* Return the address of the block. */
1448 return copy_addr_to_reg (XEXP (registers, 0));
1449 }
1450
1451 /* __builtin_apply_args returns a block of memory allocated on
1452 the stack into which are stored the arg pointer, structure
1453 value address, static chain, and all the registers that might
1454 possibly be used in performing a function call. The code is
1455 moved to the start of the function so the incoming values are
1456 saved. */
1457
1458 static rtx
1459 expand_builtin_apply_args (void)
1460 {
1461 /* Don't do __builtin_apply_args more than once in a function.
1462 Save the result of the first call and reuse it. */
1463 if (apply_args_value != 0)
1464 return apply_args_value;
1465 {
1466 /* When this function is called, it means that registers must be
1467 saved on entry to this function. So we migrate the
1468 call to the first insn of this function. */
1469 rtx temp;
1470
1471 start_sequence ();
1472 temp = expand_builtin_apply_args_1 ();
1473 rtx_insn *seq = get_insns ();
1474 end_sequence ();
1475
1476 apply_args_value = temp;
1477
1478 /* Put the insns after the NOTE that starts the function.
1479 If this is inside a start_sequence, make the outer-level insn
1480 chain current, so the code is placed at the start of the
1481 function. If internal_arg_pointer is a non-virtual pseudo,
1482 it needs to be placed after the function that initializes
1483 that pseudo. */
1484 push_topmost_sequence ();
1485 if (REG_P (crtl->args.internal_arg_pointer)
1486 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1487 emit_insn_before (seq, parm_birth_insn);
1488 else
1489 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1490 pop_topmost_sequence ();
1491 return temp;
1492 }
1493 }
1494
1495 /* Perform an untyped call and save the state required to perform an
1496 untyped return of whatever value was returned by the given function. */
1497
1498 static rtx
1499 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1500 {
1501 int size, align, regno;
1502 machine_mode mode;
1503 rtx incoming_args, result, reg, dest, src;
1504 rtx_call_insn *call_insn;
1505 rtx old_stack_level = 0;
1506 rtx call_fusage = 0;
1507 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1508
1509 arguments = convert_memory_address (Pmode, arguments);
1510
1511 /* Create a block where the return registers can be saved. */
1512 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1513
1514 /* Fetch the arg pointer from the ARGUMENTS block. */
1515 incoming_args = gen_reg_rtx (Pmode);
1516 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1517 if (!STACK_GROWS_DOWNWARD)
1518 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1519 incoming_args, 0, OPTAB_LIB_WIDEN);
1520
1521 /* Push a new argument block and copy the arguments. Do not allow
1522 the (potential) memcpy call below to interfere with our stack
1523 manipulations. */
1524 do_pending_stack_adjust ();
1525 NO_DEFER_POP;
1526
1527 /* Save the stack with nonlocal if available. */
1528 if (targetm.have_save_stack_nonlocal ())
1529 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1530 else
1531 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1532
1533 /* Allocate a block of memory onto the stack and copy the memory
1534 arguments to the outgoing arguments address. We can pass TRUE
1535 as the 4th argument because we just saved the stack pointer
1536 and will restore it right after the call. */
1537 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1538
1539 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1540 may have already set current_function_calls_alloca to true.
1541 current_function_calls_alloca won't be set if argsize is zero,
1542 so we have to guarantee need_drap is true here. */
1543 if (SUPPORTS_STACK_ALIGNMENT)
1544 crtl->need_drap = true;
1545
1546 dest = virtual_outgoing_args_rtx;
1547 if (!STACK_GROWS_DOWNWARD)
1548 {
1549 if (CONST_INT_P (argsize))
1550 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1551 else
1552 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1553 }
1554 dest = gen_rtx_MEM (BLKmode, dest);
1555 set_mem_align (dest, PARM_BOUNDARY);
1556 src = gen_rtx_MEM (BLKmode, incoming_args);
1557 set_mem_align (src, PARM_BOUNDARY);
1558 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1559
1560 /* Refer to the argument block. */
1561 apply_args_size ();
1562 arguments = gen_rtx_MEM (BLKmode, arguments);
1563 set_mem_align (arguments, PARM_BOUNDARY);
1564
1565 /* Walk past the arg-pointer and structure value address. */
1566 size = GET_MODE_SIZE (Pmode);
1567 if (struct_value)
1568 size += GET_MODE_SIZE (Pmode);
1569
1570 /* Restore each of the registers previously saved. Make USE insns
1571 for each of these registers for use in making the call. */
1572 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1573 if ((mode = apply_args_mode[regno]) != VOIDmode)
1574 {
1575 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1576 if (size % align != 0)
1577 size = CEIL (size, align) * align;
1578 reg = gen_rtx_REG (mode, regno);
1579 emit_move_insn (reg, adjust_address (arguments, mode, size));
1580 use_reg (&call_fusage, reg);
1581 size += GET_MODE_SIZE (mode);
1582 }
1583
1584 /* Restore the structure value address unless this is passed as an
1585 "invisible" first argument. */
1586 size = GET_MODE_SIZE (Pmode);
1587 if (struct_value)
1588 {
1589 rtx value = gen_reg_rtx (Pmode);
1590 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1591 emit_move_insn (struct_value, value);
1592 if (REG_P (struct_value))
1593 use_reg (&call_fusage, struct_value);
1594 size += GET_MODE_SIZE (Pmode);
1595 }
1596
1597 /* All arguments and registers used for the call are set up by now! */
1598 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1599
1600 /* Ensure the address is valid; a SYMBOL_REF is already valid, so nothing is needed,
1601 and we don't want to load it into a register as an optimization,
1602 because prepare_call_address already did it if it should be done. */
1603 if (GET_CODE (function) != SYMBOL_REF)
1604 function = memory_address (FUNCTION_MODE, function);
1605
1606 /* Generate the actual call instruction and save the return value. */
1607 if (targetm.have_untyped_call ())
1608 {
1609 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1610 emit_call_insn (targetm.gen_untyped_call (mem, result,
1611 result_vector (1, result)));
1612 }
1613 else if (targetm.have_call_value ())
1614 {
1615 rtx valreg = 0;
1616
1617 /* Locate the unique return register. It is not possible to
1618 express a call that sets more than one return register using
1619 call_value; use untyped_call for that. In fact, untyped_call
1620 only needs to save the return registers in the given block. */
1621 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1622 if ((mode = apply_result_mode[regno]) != VOIDmode)
1623 {
1624 gcc_assert (!valreg); /* have_untyped_call required. */
1625
1626 valreg = gen_rtx_REG (mode, regno);
1627 }
1628
1629 emit_insn (targetm.gen_call_value (valreg,
1630 gen_rtx_MEM (FUNCTION_MODE, function),
1631 const0_rtx, NULL_RTX, const0_rtx));
1632
1633 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1634 }
1635 else
1636 gcc_unreachable ();
1637
1638 /* Find the CALL insn we just emitted, and attach the register usage
1639 information. */
1640 call_insn = last_call_insn ();
1641 add_function_usage_to (call_insn, call_fusage);
1642
1643 /* Restore the stack. */
1644 if (targetm.have_save_stack_nonlocal ())
1645 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1646 else
1647 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1648 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1649
1650 OK_DEFER_POP;
1651
1652 /* Return the address of the result block. */
1653 result = copy_addr_to_reg (XEXP (result, 0));
1654 return convert_memory_address (ptr_mode, result);
1655 }
1656
1657 /* Perform an untyped return. */
1658
1659 static void
1660 expand_builtin_return (rtx result)
1661 {
1662 int size, align, regno;
1663 machine_mode mode;
1664 rtx reg;
1665 rtx_insn *call_fusage = 0;
1666
1667 result = convert_memory_address (Pmode, result);
1668
1669 apply_result_size ();
1670 result = gen_rtx_MEM (BLKmode, result);
1671
1672 if (targetm.have_untyped_return ())
1673 {
1674 rtx vector = result_vector (0, result);
1675 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1676 emit_barrier ();
1677 return;
1678 }
1679
1680 /* Restore the return value and note that each value is used. */
1681 size = 0;
1682 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1683 if ((mode = apply_result_mode[regno]) != VOIDmode)
1684 {
1685 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1686 if (size % align != 0)
1687 size = CEIL (size, align) * align;
1688 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1689 emit_move_insn (reg, adjust_address (result, mode, size));
1690
1691 push_to_sequence (call_fusage);
1692 emit_use (reg);
1693 call_fusage = get_insns ();
1694 end_sequence ();
1695 size += GET_MODE_SIZE (mode);
1696 }
1697
1698 /* Put the USE insns before the return. */
1699 emit_insn (call_fusage);
1700
1701 /* Return whatever values were restored by jumping directly to the end
1702 of the function. */
1703 expand_naked_return ();
1704 }
1705
1706 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1707
1708 static enum type_class
1709 type_to_class (tree type)
1710 {
1711 switch (TREE_CODE (type))
1712 {
1713 case VOID_TYPE: return void_type_class;
1714 case INTEGER_TYPE: return integer_type_class;
1715 case ENUMERAL_TYPE: return enumeral_type_class;
1716 case BOOLEAN_TYPE: return boolean_type_class;
1717 case POINTER_TYPE: return pointer_type_class;
1718 case REFERENCE_TYPE: return reference_type_class;
1719 case OFFSET_TYPE: return offset_type_class;
1720 case REAL_TYPE: return real_type_class;
1721 case COMPLEX_TYPE: return complex_type_class;
1722 case FUNCTION_TYPE: return function_type_class;
1723 case METHOD_TYPE: return method_type_class;
1724 case RECORD_TYPE: return record_type_class;
1725 case UNION_TYPE:
1726 case QUAL_UNION_TYPE: return union_type_class;
1727 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1728 ? string_type_class : array_type_class);
1729 case LANG_TYPE: return lang_type_class;
1730 default: return no_type_class;
1731 }
1732 }
1733
1734 /* Expand a call EXP to __builtin_classify_type. */
1735
1736 static rtx
1737 expand_builtin_classify_type (tree exp)
1738 {
1739 if (call_expr_nargs (exp))
1740 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1741 return GEN_INT (no_type_class);
1742 }
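
/* As an illustration (hypothetical user code, not tied to any target):

     __builtin_classify_type (1.5)   evaluates to real_type_class,
     __builtin_classify_type (42)    evaluates to integer_type_class,
     __builtin_classify_type (&x)    evaluates to pointer_type_class,

   where the classes are the enum type_class values returned by
   type_to_class above.  A call with no argument yields no_type_class.  */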
1743
1744 /* This helper macro, meant to be used in mathfn_built_in below,
1745 determines which among a set of three builtin math functions is
1746 appropriate for a given type mode. The `F' and `L' cases are
1747 automatically generated from the `double' case. */
1748 #define CASE_MATHFN(MATHFN) \
1749 CASE_CFN_##MATHFN: \
1750 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1751 fcodel = BUILT_IN_##MATHFN##L ; break;
1752 /* Similar to above, but appends _R after any F/L suffix. */
1753 #define CASE_MATHFN_REENT(MATHFN) \
1754 case CFN_BUILT_IN_##MATHFN##_R: \
1755 case CFN_BUILT_IN_##MATHFN##F_R: \
1756 case CFN_BUILT_IN_##MATHFN##L_R: \
1757 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1758 fcodel = BUILT_IN_##MATHFN##L_R ; break;
1759
1760 /* Return a function equivalent to FN but operating on floating-point
1761 values of type TYPE, or END_BUILTINS if no such function exists.
1762 This is purely an operation on function codes; it does not guarantee
1763 that the target actually has an implementation of the function. */
1764
1765 static built_in_function
1766 mathfn_built_in_2 (tree type, combined_fn fn)
1767 {
1768 built_in_function fcode, fcodef, fcodel;
1769
1770 switch (fn)
1771 {
1772 CASE_MATHFN (ACOS)
1773 CASE_MATHFN (ACOSH)
1774 CASE_MATHFN (ASIN)
1775 CASE_MATHFN (ASINH)
1776 CASE_MATHFN (ATAN)
1777 CASE_MATHFN (ATAN2)
1778 CASE_MATHFN (ATANH)
1779 CASE_MATHFN (CBRT)
1780 CASE_MATHFN (CEIL)
1781 CASE_MATHFN (CEXPI)
1782 CASE_MATHFN (COPYSIGN)
1783 CASE_MATHFN (COS)
1784 CASE_MATHFN (COSH)
1785 CASE_MATHFN (DREM)
1786 CASE_MATHFN (ERF)
1787 CASE_MATHFN (ERFC)
1788 CASE_MATHFN (EXP)
1789 CASE_MATHFN (EXP10)
1790 CASE_MATHFN (EXP2)
1791 CASE_MATHFN (EXPM1)
1792 CASE_MATHFN (FABS)
1793 CASE_MATHFN (FDIM)
1794 CASE_MATHFN (FLOOR)
1795 CASE_MATHFN (FMA)
1796 CASE_MATHFN (FMAX)
1797 CASE_MATHFN (FMIN)
1798 CASE_MATHFN (FMOD)
1799 CASE_MATHFN (FREXP)
1800 CASE_MATHFN (GAMMA)
1801 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
1802 CASE_MATHFN (HUGE_VAL)
1803 CASE_MATHFN (HYPOT)
1804 CASE_MATHFN (ILOGB)
1805 CASE_MATHFN (ICEIL)
1806 CASE_MATHFN (IFLOOR)
1807 CASE_MATHFN (INF)
1808 CASE_MATHFN (IRINT)
1809 CASE_MATHFN (IROUND)
1810 CASE_MATHFN (ISINF)
1811 CASE_MATHFN (J0)
1812 CASE_MATHFN (J1)
1813 CASE_MATHFN (JN)
1814 CASE_MATHFN (LCEIL)
1815 CASE_MATHFN (LDEXP)
1816 CASE_MATHFN (LFLOOR)
1817 CASE_MATHFN (LGAMMA)
1818 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
1819 CASE_MATHFN (LLCEIL)
1820 CASE_MATHFN (LLFLOOR)
1821 CASE_MATHFN (LLRINT)
1822 CASE_MATHFN (LLROUND)
1823 CASE_MATHFN (LOG)
1824 CASE_MATHFN (LOG10)
1825 CASE_MATHFN (LOG1P)
1826 CASE_MATHFN (LOG2)
1827 CASE_MATHFN (LOGB)
1828 CASE_MATHFN (LRINT)
1829 CASE_MATHFN (LROUND)
1830 CASE_MATHFN (MODF)
1831 CASE_MATHFN (NAN)
1832 CASE_MATHFN (NANS)
1833 CASE_MATHFN (NEARBYINT)
1834 CASE_MATHFN (NEXTAFTER)
1835 CASE_MATHFN (NEXTTOWARD)
1836 CASE_MATHFN (POW)
1837 CASE_MATHFN (POWI)
1838 CASE_MATHFN (POW10)
1839 CASE_MATHFN (REMAINDER)
1840 CASE_MATHFN (REMQUO)
1841 CASE_MATHFN (RINT)
1842 CASE_MATHFN (ROUND)
1843 CASE_MATHFN (SCALB)
1844 CASE_MATHFN (SCALBLN)
1845 CASE_MATHFN (SCALBN)
1846 CASE_MATHFN (SIGNBIT)
1847 CASE_MATHFN (SIGNIFICAND)
1848 CASE_MATHFN (SIN)
1849 CASE_MATHFN (SINCOS)
1850 CASE_MATHFN (SINH)
1851 CASE_MATHFN (SQRT)
1852 CASE_MATHFN (TAN)
1853 CASE_MATHFN (TANH)
1854 CASE_MATHFN (TGAMMA)
1855 CASE_MATHFN (TRUNC)
1856 CASE_MATHFN (Y0)
1857 CASE_MATHFN (Y1)
1858 CASE_MATHFN (YN)
1859
1860 default:
1861 return END_BUILTINS;
1862 }
1863
1864 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1865 return fcode;
1866 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1867 return fcodef;
1868 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1869 return fcodel;
1870 else
1871 return END_BUILTINS;
1872 }
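
/* Illustrative sketch of the mapping above:
   mathfn_built_in_2 (float_type_node, CFN_BUILT_IN_SQRT) yields
   BUILT_IN_SQRTF, and with long_double_type_node it yields BUILT_IN_SQRTL;
   for a type whose main variant is none of double, float or long double
   the result is END_BUILTINS.  */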
1873
1874 /* Return a mathematical function equivalent to FN but operating directly on TYPE,
1875 if available. If IMPLICIT_P is true use the implicit builtin declaration,
1876 otherwise use the explicit declaration. If we can't do the conversion,
1877 return null. */
1878
1879 static tree
1880 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
1881 {
1882 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
1883 if (fcode2 == END_BUILTINS)
1884 return NULL_TREE;
1885
1886 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1887 return NULL_TREE;
1888
1889 return builtin_decl_explicit (fcode2);
1890 }
1891
1892 /* Like mathfn_built_in_1, but always use the implicit array. */
1893
1894 tree
1895 mathfn_built_in (tree type, combined_fn fn)
1896 {
1897 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1898 }
1899
1900 /* Like mathfn_built_in_1, but take a built_in_function and
1901 always use the implicit array. */
1902
1903 tree
1904 mathfn_built_in (tree type, enum built_in_function fn)
1905 {
1906 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
1907 }
1908
1909 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
1910 return its code, otherwise return IFN_LAST. Note that this function
1911 only tests whether the function is defined in internal-fn.def, not whether
1912 it is actually available on the target. */
1913
1914 internal_fn
1915 associated_internal_fn (tree fndecl)
1916 {
1917 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
1918 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
1919 switch (DECL_FUNCTION_CODE (fndecl))
1920 {
1921 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
1922 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
1923 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
1924 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
1925 #include "internal-fn.def"
1926
1927 CASE_FLT_FN (BUILT_IN_POW10):
1928 return IFN_EXP10;
1929
1930 CASE_FLT_FN (BUILT_IN_DREM):
1931 return IFN_REMAINDER;
1932
1933 CASE_FLT_FN (BUILT_IN_SCALBN):
1934 CASE_FLT_FN (BUILT_IN_SCALBLN):
1935 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
1936 return IFN_LDEXP;
1937 return IFN_LAST;
1938
1939 default:
1940 return IFN_LAST;
1941 }
1942 }
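
/* Illustrative mapping performed above: BUILT_IN_POW10F is reported as
   IFN_EXP10 and BUILT_IN_DREM as IFN_REMAINDER, while BUILT_IN_SCALBN maps
   to IFN_LDEXP only when the return type uses a radix-2 format.  Functions
   such as sqrt that have a DEF_INTERNAL_FLT_FN entry in internal-fn.def
   map directly to their IFN_* counterpart.  */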
1943
1944 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
1945 on the current target by a call to an internal function, return the
1946 code of that internal function, otherwise return IFN_LAST. The caller
1947 is responsible for ensuring that any side-effects of the built-in
1948 call are dealt with correctly. E.g. if CALL sets errno, the caller
1949 must decide that the errno result isn't needed or make it available
1950 in some other way. */
1951
1952 internal_fn
1953 replacement_internal_fn (gcall *call)
1954 {
1955 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
1956 {
1957 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
1958 if (ifn != IFN_LAST)
1959 {
1960 tree_pair types = direct_internal_fn_types (ifn, call);
1961 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
1962 if (direct_internal_fn_supported_p (ifn, types, opt_type))
1963 return ifn;
1964 }
1965 }
1966 return IFN_LAST;
1967 }
1968
1969 /* Expand a call to the builtin ternary math functions (fma).
1970 Return NULL_RTX if a normal call should be emitted rather than expanding the
1971 function in-line. EXP is the expression that is a call to the builtin
1972 function; if convenient, the result should be placed in TARGET.
1973 SUBTARGET may be used as the target for computing one of EXP's
1974 operands. */
1975
1976 static rtx
1977 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
1978 {
1979 optab builtin_optab;
1980 rtx op0, op1, op2, result;
1981 rtx_insn *insns;
1982 tree fndecl = get_callee_fndecl (exp);
1983 tree arg0, arg1, arg2;
1984 machine_mode mode;
1985
1986 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
1987 return NULL_RTX;
1988
1989 arg0 = CALL_EXPR_ARG (exp, 0);
1990 arg1 = CALL_EXPR_ARG (exp, 1);
1991 arg2 = CALL_EXPR_ARG (exp, 2);
1992
1993 switch (DECL_FUNCTION_CODE (fndecl))
1994 {
1995 CASE_FLT_FN (BUILT_IN_FMA):
1996 builtin_optab = fma_optab; break;
1997 default:
1998 gcc_unreachable ();
1999 }
2000
2001 /* Make a suitable register to place result in. */
2002 mode = TYPE_MODE (TREE_TYPE (exp));
2003
2004 /* Before working hard, check whether the instruction is available. */
2005 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2006 return NULL_RTX;
2007
2008 result = gen_reg_rtx (mode);
2009
2010 /* Always stabilize the argument list. */
2011 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2012 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2013 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2014
2015 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2016 op1 = expand_normal (arg1);
2017 op2 = expand_normal (arg2);
2018
2019 start_sequence ();
2020
2021 /* Compute into RESULT.
2022 Set RESULT to wherever the result comes back. */
2023 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2024 result, 0);
2025
2026 /* If we were unable to expand via the builtin, stop the sequence
2027 (without outputting the insns) and call the library function
2028 with the stabilized argument list. */
2029 if (result == 0)
2030 {
2031 end_sequence ();
2032 return expand_call (exp, target, target == const0_rtx);
2033 }
2034
2035 /* Output the entire sequence. */
2036 insns = get_insns ();
2037 end_sequence ();
2038 emit_insn (insns);
2039
2040 return result;
2041 }
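
/* Illustrative example of the expansion above: on a target that implements
   fma_optab, a call such as

     double r = __builtin_fma (a, b, c);

   (a * b + c computed with a single rounding) is expanded to the target's
   fused multiply-add instruction; otherwise the code above returns
   NULL_RTX and a normal libm call to fma is emitted instead.  */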
2042
2043 /* Expand a call to the builtin sin and cos math functions.
2044 Return NULL_RTX if a normal call should be emitted rather than expanding the
2045 function in-line. EXP is the expression that is a call to the builtin
2046 function; if convenient, the result should be placed in TARGET.
2047 SUBTARGET may be used as the target for computing one of EXP's
2048 operands. */
2049
2050 static rtx
2051 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2052 {
2053 optab builtin_optab;
2054 rtx op0;
2055 rtx_insn *insns;
2056 tree fndecl = get_callee_fndecl (exp);
2057 machine_mode mode;
2058 tree arg;
2059
2060 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2061 return NULL_RTX;
2062
2063 arg = CALL_EXPR_ARG (exp, 0);
2064
2065 switch (DECL_FUNCTION_CODE (fndecl))
2066 {
2067 CASE_FLT_FN (BUILT_IN_SIN):
2068 CASE_FLT_FN (BUILT_IN_COS):
2069 builtin_optab = sincos_optab; break;
2070 default:
2071 gcc_unreachable ();
2072 }
2073
2074 /* Make a suitable register to place result in. */
2075 mode = TYPE_MODE (TREE_TYPE (exp));
2076
2077 /* Check if the sincos insn is available; otherwise fall back
2078 to the sin or cos insn. */
2079 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2080 switch (DECL_FUNCTION_CODE (fndecl))
2081 {
2082 CASE_FLT_FN (BUILT_IN_SIN):
2083 builtin_optab = sin_optab; break;
2084 CASE_FLT_FN (BUILT_IN_COS):
2085 builtin_optab = cos_optab; break;
2086 default:
2087 gcc_unreachable ();
2088 }
2089
2090 /* Before working hard, check whether the instruction is available. */
2091 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2092 {
2093 rtx result = gen_reg_rtx (mode);
2094
2095 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2096 need to expand the argument again. This way, we will not perform
2097 side-effects more than once. */
2098 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2099
2100 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2101
2102 start_sequence ();
2103
2104 /* Compute into RESULT.
2105 Set RESULT to wherever the result comes back. */
2106 if (builtin_optab == sincos_optab)
2107 {
2108 int ok;
2109
2110 switch (DECL_FUNCTION_CODE (fndecl))
2111 {
2112 CASE_FLT_FN (BUILT_IN_SIN):
2113 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2114 break;
2115 CASE_FLT_FN (BUILT_IN_COS):
2116 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2117 break;
2118 default:
2119 gcc_unreachable ();
2120 }
2121 gcc_assert (ok);
2122 }
2123 else
2124 result = expand_unop (mode, builtin_optab, op0, result, 0);
2125
2126 if (result != 0)
2127 {
2128 /* Output the entire sequence. */
2129 insns = get_insns ();
2130 end_sequence ();
2131 emit_insn (insns);
2132 return result;
2133 }
2134
2135 /* If we were unable to expand via the builtin, stop the sequence
2136 (without outputting the insns) and call the library function
2137 with the stabilized argument list. */
2138 end_sequence ();
2139 }
2140
2141 return expand_call (exp, target, target == const0_rtx);
2142 }
2143
2144 /* Given an interclass math builtin decl FNDECL and its argument ARG
2145 return an RTL instruction code that implements the functionality.
2146 If that isn't possible or available return CODE_FOR_nothing. */
2147
2148 static enum insn_code
2149 interclass_mathfn_icode (tree arg, tree fndecl)
2150 {
2151 bool errno_set = false;
2152 optab builtin_optab = unknown_optab;
2153 machine_mode mode;
2154
2155 switch (DECL_FUNCTION_CODE (fndecl))
2156 {
2157 CASE_FLT_FN (BUILT_IN_ILOGB):
2158 errno_set = true; builtin_optab = ilogb_optab; break;
2159 CASE_FLT_FN (BUILT_IN_ISINF):
2160 builtin_optab = isinf_optab; break;
2161 case BUILT_IN_ISNORMAL:
2162 case BUILT_IN_ISFINITE:
2163 CASE_FLT_FN (BUILT_IN_FINITE):
2164 case BUILT_IN_FINITED32:
2165 case BUILT_IN_FINITED64:
2166 case BUILT_IN_FINITED128:
2167 case BUILT_IN_ISINFD32:
2168 case BUILT_IN_ISINFD64:
2169 case BUILT_IN_ISINFD128:
2170 /* These builtins have no optabs (yet). */
2171 break;
2172 default:
2173 gcc_unreachable ();
2174 }
2175
2176 /* There's no easy way to detect the case we need to set EDOM. */
2177 if (flag_errno_math && errno_set)
2178 return CODE_FOR_nothing;
2179
2180 /* Optab mode depends on the mode of the input argument. */
2181 mode = TYPE_MODE (TREE_TYPE (arg));
2182
2183 if (builtin_optab)
2184 return optab_handler (builtin_optab, mode);
2185 return CODE_FOR_nothing;
2186 }
2187
2188 /* Expand a call to one of the builtin math functions that operate on
2189 a floating-point argument and output an integer result (ilogb, isinf,
2190 isnan, etc.).
2191 Return 0 if a normal call should be emitted rather than expanding the
2192 function in-line. EXP is the expression that is a call to the builtin
2193 function; if convenient, the result should be placed in TARGET. */
2194
2195 static rtx
2196 expand_builtin_interclass_mathfn (tree exp, rtx target)
2197 {
2198 enum insn_code icode = CODE_FOR_nothing;
2199 rtx op0;
2200 tree fndecl = get_callee_fndecl (exp);
2201 machine_mode mode;
2202 tree arg;
2203
2204 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2205 return NULL_RTX;
2206
2207 arg = CALL_EXPR_ARG (exp, 0);
2208 icode = interclass_mathfn_icode (arg, fndecl);
2209 mode = TYPE_MODE (TREE_TYPE (arg));
2210
2211 if (icode != CODE_FOR_nothing)
2212 {
2213 struct expand_operand ops[1];
2214 rtx_insn *last = get_last_insn ();
2215 tree orig_arg = arg;
2216
2217 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2218 need to expand the argument again. This way, we will not perform
2219 side-effects more than once. */
2220 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2221
2222 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2223
2224 if (mode != GET_MODE (op0))
2225 op0 = convert_to_mode (mode, op0, 0);
2226
2227 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2228 if (maybe_legitimize_operands (icode, 0, 1, ops)
2229 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2230 return ops[0].value;
2231
2232 delete_insns_since (last);
2233 CALL_EXPR_ARG (exp, 0) = orig_arg;
2234 }
2235
2236 return NULL_RTX;
2237 }
2238
2239 /* Expand a call to the builtin sincos math function.
2240 Return NULL_RTX if a normal call should be emitted rather than expanding the
2241 function in-line. EXP is the expression that is a call to the builtin
2242 function. */
2243
2244 static rtx
2245 expand_builtin_sincos (tree exp)
2246 {
2247 rtx op0, op1, op2, target1, target2;
2248 machine_mode mode;
2249 tree arg, sinp, cosp;
2250 int result;
2251 location_t loc = EXPR_LOCATION (exp);
2252 tree alias_type, alias_off;
2253
2254 if (!validate_arglist (exp, REAL_TYPE,
2255 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2256 return NULL_RTX;
2257
2258 arg = CALL_EXPR_ARG (exp, 0);
2259 sinp = CALL_EXPR_ARG (exp, 1);
2260 cosp = CALL_EXPR_ARG (exp, 2);
2261
2262 /* Make a suitable register to place result in. */
2263 mode = TYPE_MODE (TREE_TYPE (arg));
2264
2265 /* Check if sincos insn is available, otherwise emit the call. */
2266 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2267 return NULL_RTX;
2268
2269 target1 = gen_reg_rtx (mode);
2270 target2 = gen_reg_rtx (mode);
2271
2272 op0 = expand_normal (arg);
2273 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2274 alias_off = build_int_cst (alias_type, 0);
2275 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2276 sinp, alias_off));
2277 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2278 cosp, alias_off));
2279
2280 /* Compute into target1 and target2.
2281 Set TARGET to wherever the result comes back. */
2282 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2283 gcc_assert (result);
2284
2285 /* Move target1 and target2 to the memory locations indicated
2286 by op1 and op2. */
2287 emit_move_insn (op1, target1);
2288 emit_move_insn (op2, target2);
2289
2290 return const0_rtx;
2291 }
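
/* Illustrative example: when the target provides a sincos insn, a call
   such as

     sincos (x, &s, &c);

   is expanded above into a single two-output operation whose results are
   then stored through the two pointer arguments, instead of emitting
   separate sin and cos libcalls.  */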
2292
2293 /* Expand a call to the internal cexpi builtin to the sincos math function.
2294 EXP is the expression that is a call to the builtin function; if convenient,
2295 the result should be placed in TARGET. */
2296
2297 static rtx
2298 expand_builtin_cexpi (tree exp, rtx target)
2299 {
2300 tree fndecl = get_callee_fndecl (exp);
2301 tree arg, type;
2302 machine_mode mode;
2303 rtx op0, op1, op2;
2304 location_t loc = EXPR_LOCATION (exp);
2305
2306 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2307 return NULL_RTX;
2308
2309 arg = CALL_EXPR_ARG (exp, 0);
2310 type = TREE_TYPE (arg);
2311 mode = TYPE_MODE (TREE_TYPE (arg));
2312
2313 /* Try expanding via a sincos optab, fall back to emitting a libcall
2314 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2315 is only generated from sincos or cexp, or when we know we have either of them. */
2316 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2317 {
2318 op1 = gen_reg_rtx (mode);
2319 op2 = gen_reg_rtx (mode);
2320
2321 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2322
2323 /* Compute into op1 and op2. */
2324 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2325 }
2326 else if (targetm.libc_has_function (function_sincos))
2327 {
2328 tree call, fn = NULL_TREE;
2329 tree top1, top2;
2330 rtx op1a, op2a;
2331
2332 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2333 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2334 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2335 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2336 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2337 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2338 else
2339 gcc_unreachable ();
2340
2341 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2342 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2343 op1a = copy_addr_to_reg (XEXP (op1, 0));
2344 op2a = copy_addr_to_reg (XEXP (op2, 0));
2345 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2346 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2347
2348 /* Make sure not to fold the sincos call again. */
2349 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2350 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2351 call, 3, arg, top1, top2));
2352 }
2353 else
2354 {
2355 tree call, fn = NULL_TREE, narg;
2356 tree ctype = build_complex_type (type);
2357
2358 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2359 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2360 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2361 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2362 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2363 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2364 else
2365 gcc_unreachable ();
2366
2367 /* If we don't have a decl for cexp, create one. This is the
2368 friendliest fallback if the user calls __builtin_cexpi
2369 without full target C99 function support. */
2370 if (fn == NULL_TREE)
2371 {
2372 tree fntype;
2373 const char *name = NULL;
2374
2375 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2376 name = "cexpf";
2377 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2378 name = "cexp";
2379 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2380 name = "cexpl";
2381
2382 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2383 fn = build_fn_decl (name, fntype);
2384 }
2385
2386 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2387 build_real (type, dconst0), arg);
2388
2389 /* Make sure not to fold the cexp call again. */
2390 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2391 return expand_expr (build_call_nary (ctype, call, 1, narg),
2392 target, VOIDmode, EXPAND_NORMAL);
2393 }
2394
2395 /* Now build the proper return type. */
2396 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2397 make_tree (TREE_TYPE (arg), op2),
2398 make_tree (TREE_TYPE (arg), op1)),
2399 target, VOIDmode, EXPAND_NORMAL);
2400 }
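
/* Illustrative example: __builtin_cexpi (x) computes cos (x) + i*sin (x),
   i.e. cexp (I*x).  With a sincos insn the two parts are computed
   directly; otherwise the code above falls back to a sincos libcall or,
   as a last resort, builds the complex argument 0 + x*i and calls cexp.  */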
2401
2402 /* Conveniently construct a function call expression. FNDECL names the
2403 function to be called, N is the number of arguments, and the "..."
2404 parameters are the argument expressions. Unlike build_call_expr
2405 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2406
2407 static tree
2408 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2409 {
2410 va_list ap;
2411 tree fntype = TREE_TYPE (fndecl);
2412 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2413
2414 va_start (ap, n);
2415 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2416 va_end (ap);
2417 SET_EXPR_LOCATION (fn, loc);
2418 return fn;
2419 }
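
/* Typical use (as later in this file): a call such as

     tree result = build_call_nofold_loc (loc, fn, 2, dst, src);

   builds an unfolded CALL_EXPR invoking FN with two arguments, suitable
   for handing straight to expand_expr.  */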
2420
2421 /* Expand a call to one of the builtin rounding functions gcc defines
2422 as an extension (lfloor and lceil). As these are gcc extensions we
2423 do not need to worry about setting errno to EDOM.
2424 If expanding via optab fails, lower expression to (int)(floor(x)).
2425 EXP is the expression that is a call to the builtin function;
2426 if convenient, the result should be placed in TARGET. */
2427
2428 static rtx
2429 expand_builtin_int_roundingfn (tree exp, rtx target)
2430 {
2431 convert_optab builtin_optab;
2432 rtx op0, tmp;
2433 rtx_insn *insns;
2434 tree fndecl = get_callee_fndecl (exp);
2435 enum built_in_function fallback_fn;
2436 tree fallback_fndecl;
2437 machine_mode mode;
2438 tree arg;
2439
2440 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2441 gcc_unreachable ();
2442
2443 arg = CALL_EXPR_ARG (exp, 0);
2444
2445 switch (DECL_FUNCTION_CODE (fndecl))
2446 {
2447 CASE_FLT_FN (BUILT_IN_ICEIL):
2448 CASE_FLT_FN (BUILT_IN_LCEIL):
2449 CASE_FLT_FN (BUILT_IN_LLCEIL):
2450 builtin_optab = lceil_optab;
2451 fallback_fn = BUILT_IN_CEIL;
2452 break;
2453
2454 CASE_FLT_FN (BUILT_IN_IFLOOR):
2455 CASE_FLT_FN (BUILT_IN_LFLOOR):
2456 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2457 builtin_optab = lfloor_optab;
2458 fallback_fn = BUILT_IN_FLOOR;
2459 break;
2460
2461 default:
2462 gcc_unreachable ();
2463 }
2464
2465 /* Make a suitable register to place result in. */
2466 mode = TYPE_MODE (TREE_TYPE (exp));
2467
2468 target = gen_reg_rtx (mode);
2469
2470 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2471 need to expand the argument again. This way, we will not perform
2472 side-effects more than once. */
2473 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2474
2475 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2476
2477 start_sequence ();
2478
2479 /* Compute into TARGET. */
2480 if (expand_sfix_optab (target, op0, builtin_optab))
2481 {
2482 /* Output the entire sequence. */
2483 insns = get_insns ();
2484 end_sequence ();
2485 emit_insn (insns);
2486 return target;
2487 }
2488
2489 /* If we were unable to expand via the builtin, stop the sequence
2490 (without outputting the insns). */
2491 end_sequence ();
2492
2493 /* Fall back to floating point rounding optab. */
2494 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2495
2496 /* For non-C99 targets we may end up without a fallback fndecl here
2497 if the user called __builtin_lfloor directly. In this case emit
2498 a call to the floor/ceil variants nevertheless. This should result
2499 in the best user experience on targets without full C99 support. */
2500 if (fallback_fndecl == NULL_TREE)
2501 {
2502 tree fntype;
2503 const char *name = NULL;
2504
2505 switch (DECL_FUNCTION_CODE (fndecl))
2506 {
2507 case BUILT_IN_ICEIL:
2508 case BUILT_IN_LCEIL:
2509 case BUILT_IN_LLCEIL:
2510 name = "ceil";
2511 break;
2512 case BUILT_IN_ICEILF:
2513 case BUILT_IN_LCEILF:
2514 case BUILT_IN_LLCEILF:
2515 name = "ceilf";
2516 break;
2517 case BUILT_IN_ICEILL:
2518 case BUILT_IN_LCEILL:
2519 case BUILT_IN_LLCEILL:
2520 name = "ceill";
2521 break;
2522 case BUILT_IN_IFLOOR:
2523 case BUILT_IN_LFLOOR:
2524 case BUILT_IN_LLFLOOR:
2525 name = "floor";
2526 break;
2527 case BUILT_IN_IFLOORF:
2528 case BUILT_IN_LFLOORF:
2529 case BUILT_IN_LLFLOORF:
2530 name = "floorf";
2531 break;
2532 case BUILT_IN_IFLOORL:
2533 case BUILT_IN_LFLOORL:
2534 case BUILT_IN_LLFLOORL:
2535 name = "floorl";
2536 break;
2537 default:
2538 gcc_unreachable ();
2539 }
2540
2541 fntype = build_function_type_list (TREE_TYPE (arg),
2542 TREE_TYPE (arg), NULL_TREE);
2543 fallback_fndecl = build_fn_decl (name, fntype);
2544 }
2545
2546 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2547
2548 tmp = expand_normal (exp);
2549 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2550
2551 /* Truncate the result of floating point optab to integer
2552 via expand_fix (). */
2553 target = gen_reg_rtx (mode);
2554 expand_fix (target, tmp, 0);
2555
2556 return target;
2557 }
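
/* Illustrative example: if the target has no lfloor pattern, a call

     long l = __builtin_lfloor (x);

   is handled above by calling floor (or a floor/ceil variant chosen from
   the fallback table) and then converting the result with expand_fix,
   i.e. roughly (long) floor (x).  */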
2558
2559 /* Expand a call to one of the builtin math functions doing integer
2560 conversion (lrint).
2561 Return 0 if a normal call should be emitted rather than expanding the
2562 function in-line. EXP is the expression that is a call to the builtin
2563 function; if convenient, the result should be placed in TARGET. */
2564
2565 static rtx
2566 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2567 {
2568 convert_optab builtin_optab;
2569 rtx op0;
2570 rtx_insn *insns;
2571 tree fndecl = get_callee_fndecl (exp);
2572 tree arg;
2573 machine_mode mode;
2574 enum built_in_function fallback_fn = BUILT_IN_NONE;
2575
2576 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2577 gcc_unreachable ();
2578
2579 arg = CALL_EXPR_ARG (exp, 0);
2580
2581 switch (DECL_FUNCTION_CODE (fndecl))
2582 {
2583 CASE_FLT_FN (BUILT_IN_IRINT):
2584 fallback_fn = BUILT_IN_LRINT;
2585 gcc_fallthrough ();
2586 CASE_FLT_FN (BUILT_IN_LRINT):
2587 CASE_FLT_FN (BUILT_IN_LLRINT):
2588 builtin_optab = lrint_optab;
2589 break;
2590
2591 CASE_FLT_FN (BUILT_IN_IROUND):
2592 fallback_fn = BUILT_IN_LROUND;
2593 gcc_fallthrough ();
2594 CASE_FLT_FN (BUILT_IN_LROUND):
2595 CASE_FLT_FN (BUILT_IN_LLROUND):
2596 builtin_optab = lround_optab;
2597 break;
2598
2599 default:
2600 gcc_unreachable ();
2601 }
2602
2603 /* There's no easy way to detect the case we need to set EDOM. */
2604 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2605 return NULL_RTX;
2606
2607 /* Make a suitable register to place result in. */
2608 mode = TYPE_MODE (TREE_TYPE (exp));
2609
2610 /* There's no easy way to detect the case we need to set EDOM. */
2611 if (!flag_errno_math)
2612 {
2613 rtx result = gen_reg_rtx (mode);
2614
2615 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2616 need to expand the argument again. This way, we will not perform
2617 side-effects more than once. */
2618 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2619
2620 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2621
2622 start_sequence ();
2623
2624 if (expand_sfix_optab (result, op0, builtin_optab))
2625 {
2626 /* Output the entire sequence. */
2627 insns = get_insns ();
2628 end_sequence ();
2629 emit_insn (insns);
2630 return result;
2631 }
2632
2633 /* If we were unable to expand via the builtin, stop the sequence
2634 (without outputting the insns) and call the library function
2635 with the stabilized argument list. */
2636 end_sequence ();
2637 }
2638
2639 if (fallback_fn != BUILT_IN_NONE)
2640 {
2641 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2642 targets, (int) round (x) should never be transformed into
2643 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2644 a call to lround in the hope that the target provides at least some
2645 C99 functions. This should result in the best user experience on
2646 targets without full C99 support. */
2647 tree fallback_fndecl = mathfn_built_in_1
2648 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
2649
2650 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2651 fallback_fndecl, 1, arg);
2652
2653 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2654 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2655 return convert_to_mode (mode, target, 0);
2656 }
2657
2658 return expand_call (exp, target, target == const0_rtx);
2659 }
2660
2661 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2662 a normal call should be emitted rather than expanding the function
2663 in-line. EXP is the expression that is a call to the builtin
2664 function; if convenient, the result should be placed in TARGET. */
2665
2666 static rtx
2667 expand_builtin_powi (tree exp, rtx target)
2668 {
2669 tree arg0, arg1;
2670 rtx op0, op1;
2671 machine_mode mode;
2672 machine_mode mode2;
2673
2674 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2675 return NULL_RTX;
2676
2677 arg0 = CALL_EXPR_ARG (exp, 0);
2678 arg1 = CALL_EXPR_ARG (exp, 1);
2679 mode = TYPE_MODE (TREE_TYPE (exp));
2680
2681 /* Emit a libcall to libgcc. */
2682
2683 /* Mode of the 2nd argument must match that of an int. */
2684 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2685
2686 if (target == NULL_RTX)
2687 target = gen_reg_rtx (mode);
2688
2689 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2690 if (GET_MODE (op0) != mode)
2691 op0 = convert_to_mode (mode, op0, 0);
2692 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2693 if (GET_MODE (op1) != mode2)
2694 op1 = convert_to_mode (mode2, op1, 0);
2695
2696 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2697 target, LCT_CONST, mode, 2,
2698 op0, mode, op1, mode2);
2699
2700 return target;
2701 }
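
/* Illustrative example: __builtin_powi (x, n) for double X always becomes
   a libcall here, typically to libgcc's __powidf2 (the exact name comes
   from optab_libfunc on powi_optab for the chosen mode).  */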
2702
2703 /* Expand expression EXP which is a call to the strlen builtin. Return
2704 NULL_RTX if we failed; the caller should emit a normal call, otherwise
2705 try to get the result in TARGET, if convenient. */
2706
2707 static rtx
2708 expand_builtin_strlen (tree exp, rtx target,
2709 machine_mode target_mode)
2710 {
2711 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2712 return NULL_RTX;
2713 else
2714 {
2715 struct expand_operand ops[4];
2716 rtx pat;
2717 tree len;
2718 tree src = CALL_EXPR_ARG (exp, 0);
2719 rtx src_reg;
2720 rtx_insn *before_strlen;
2721 machine_mode insn_mode = target_mode;
2722 enum insn_code icode = CODE_FOR_nothing;
2723 unsigned int align;
2724
2725 /* If the length can be computed at compile-time, return it. */
2726 len = c_strlen (src, 0);
2727 if (len)
2728 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2729
2730 /* If the length can be computed at compile-time and is constant
2731 integer, but there are side-effects in src, evaluate
2732 src for side-effects, then return len.
2733 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2734 can be optimized into: i++; x = 3; */
2735 len = c_strlen (src, 1);
2736 if (len && TREE_CODE (len) == INTEGER_CST)
2737 {
2738 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2739 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2740 }
2741
2742 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2743
2744 /* If SRC is not a pointer type, don't do this operation inline. */
2745 if (align == 0)
2746 return NULL_RTX;
2747
2748 /* Bail out if we can't compute strlen in the right mode. */
2749 while (insn_mode != VOIDmode)
2750 {
2751 icode = optab_handler (strlen_optab, insn_mode);
2752 if (icode != CODE_FOR_nothing)
2753 break;
2754
2755 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
2756 }
2757 if (insn_mode == VOIDmode)
2758 return NULL_RTX;
2759
2760 /* Make a place to hold the source address. We will not expand
2761 the actual source until we are sure that the expansion will
2762 not fail -- there are trees that cannot be expanded twice. */
2763 src_reg = gen_reg_rtx (Pmode);
2764
2765 /* Mark the beginning of the strlen sequence so we can emit the
2766 source operand later. */
2767 before_strlen = get_last_insn ();
2768
2769 create_output_operand (&ops[0], target, insn_mode);
2770 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
2771 create_integer_operand (&ops[2], 0);
2772 create_integer_operand (&ops[3], align);
2773 if (!maybe_expand_insn (icode, 4, ops))
2774 return NULL_RTX;
2775
2776 /* Now that we are assured of success, expand the source. */
2777 start_sequence ();
2778 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
2779 if (pat != src_reg)
2780 {
2781 #ifdef POINTERS_EXTEND_UNSIGNED
2782 if (GET_MODE (pat) != Pmode)
2783 pat = convert_to_mode (Pmode, pat,
2784 POINTERS_EXTEND_UNSIGNED);
2785 #endif
2786 emit_move_insn (src_reg, pat);
2787 }
2788 pat = get_insns ();
2789 end_sequence ();
2790
2791 if (before_strlen)
2792 emit_insn_after (pat, before_strlen);
2793 else
2794 emit_insn_before (pat, get_insns ());
2795
2796 /* Return the value in the proper mode for this function. */
2797 if (GET_MODE (ops[0].value) == target_mode)
2798 target = ops[0].value;
2799 else if (target != 0)
2800 convert_move (target, ops[0].value, 0);
2801 else
2802 target = convert_to_mode (target_mode, ops[0].value, 0);
2803
2804 return target;
2805 }
2806 }
2807
2808 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
2809 bytes from constant string DATA + OFFSET and return it as target
2810 constant. */
2811
2812 static rtx
2813 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
2814 machine_mode mode)
2815 {
2816 const char *str = (const char *) data;
2817
2818 gcc_assert (offset >= 0
2819 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
2820 <= strlen (str) + 1));
2821
2822 return c_readstr (str + offset, mode);
2823 }
2824
2825 /* LEN specifies the length of the block for the memcpy/memset operation.
2826 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
2827 In some cases we can make a very likely guess at the maximum size, in
2828 which case we store it in PROBABLE_MAX_SIZE. */
2829
2830 static void
2831 determine_block_size (tree len, rtx len_rtx,
2832 unsigned HOST_WIDE_INT *min_size,
2833 unsigned HOST_WIDE_INT *max_size,
2834 unsigned HOST_WIDE_INT *probable_max_size)
2835 {
2836 if (CONST_INT_P (len_rtx))
2837 {
2838 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
2839 return;
2840 }
2841 else
2842 {
2843 wide_int min, max;
2844 enum value_range_type range_type = VR_UNDEFINED;
2845
2846 /* Determine bounds from the type. */
2847 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
2848 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
2849 else
2850 *min_size = 0;
2851 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
2852 *probable_max_size = *max_size
2853 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
2854 else
2855 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
2856
2857 if (TREE_CODE (len) == SSA_NAME)
2858 range_type = get_range_info (len, &min, &max);
2859 if (range_type == VR_RANGE)
2860 {
2861 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
2862 *min_size = min.to_uhwi ();
2863 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
2864 *probable_max_size = *max_size = max.to_uhwi ();
2865 }
2866 else if (range_type == VR_ANTI_RANGE)
2867 {
2868 /* An anti-range 0...N lets us determine the minimal size to be N+1. */
2869 if (min == 0)
2870 {
2871 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
2872 *min_size = max.to_uhwi () + 1;
2873 }
2874 /* Code like
2875
2876 int n;
2877 if (n < 100)
2878 memcpy (a, b, n)
2879
2880 produces an anti-range allowing negative values of N. We can
2881 still use that information and guess that N is not negative.
2882 */
2883 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
2884 *probable_max_size = min.to_uhwi () - 1;
2885 }
2886 }
2887 gcc_checking_assert (*max_size <=
2888 (unsigned HOST_WIDE_INT)
2889 GET_MODE_MASK (GET_MODE (len_rtx)));
2890 }
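
/* Illustrative example: for

     if (n >= 4 && n <= 64)
       memcpy (a, b, n);

   value-range information on N gives MIN_SIZE = 4 and
   MAX_SIZE = PROBABLE_MAX_SIZE = 64, which the block-move expanders can
   use to pick a strategy; with no range information the bounds come from
   the type of LEN alone.  */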
2891
2892 /* Helper function to do the actual work for expand_builtin_memcpy. */
2893
2894 static rtx
2895 expand_builtin_memcpy_args (tree dest, tree src, tree len, rtx target, tree exp)
2896 {
2897 const char *src_str;
2898 unsigned int src_align = get_pointer_alignment (src);
2899 unsigned int dest_align = get_pointer_alignment (dest);
2900 rtx dest_mem, src_mem, dest_addr, len_rtx;
2901 HOST_WIDE_INT expected_size = -1;
2902 unsigned int expected_align = 0;
2903 unsigned HOST_WIDE_INT min_size;
2904 unsigned HOST_WIDE_INT max_size;
2905 unsigned HOST_WIDE_INT probable_max_size;
2906
2907 /* If DEST is not a pointer type, call the normal function. */
2908 if (dest_align == 0)
2909 return NULL_RTX;
2910
2911 /* If SRC is not a pointer type, don't do this
2912 operation in-line. */
2913 if (src_align == 0)
2914 return NULL_RTX;
2915
2916 if (currently_expanding_gimple_stmt)
2917 stringop_block_profile (currently_expanding_gimple_stmt,
2918 &expected_align, &expected_size);
2919
2920 if (expected_align < dest_align)
2921 expected_align = dest_align;
2922 dest_mem = get_memory_rtx (dest, len);
2923 set_mem_align (dest_mem, dest_align);
2924 len_rtx = expand_normal (len);
2925 determine_block_size (len, len_rtx, &min_size, &max_size,
2926 &probable_max_size);
2927 src_str = c_getstr (src);
2928
2929 /* If SRC is a string constant and block move would be done
2930 by pieces, we can avoid loading the string from memory
2931 and only store the computed constants. */
2932 if (src_str
2933 && CONST_INT_P (len_rtx)
2934 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
2935 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
2936 CONST_CAST (char *, src_str),
2937 dest_align, false))
2938 {
2939 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
2940 builtin_memcpy_read_str,
2941 CONST_CAST (char *, src_str),
2942 dest_align, false, 0);
2943 dest_mem = force_operand (XEXP (dest_mem, 0), target);
2944 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2945 return dest_mem;
2946 }
2947
2948 src_mem = get_memory_rtx (src, len);
2949 set_mem_align (src_mem, src_align);
2950
2951 /* Copy word part most expediently. */
2952 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
2953 CALL_EXPR_TAILCALL (exp)
2954 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
2955 expected_align, expected_size,
2956 min_size, max_size, probable_max_size);
2957
2958 if (dest_addr == 0)
2959 {
2960 dest_addr = force_operand (XEXP (dest_mem, 0), target);
2961 dest_addr = convert_memory_address (ptr_mode, dest_addr);
2962 }
2963
2964 return dest_addr;
2965 }
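
/* Illustrative example: a call such as

     memcpy (buf, "abc", 4);

   has a constant source string and a constant length, so when
   can_store_by_pieces agrees the bytes are written directly as immediate
   stores via store_by_pieces and the string constant is never loaded from
   memory; otherwise the generic block-move path above is used.  */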
2966
2967 /* Expand a call EXP to the memcpy builtin.
2968 Return NULL_RTX if we failed; the caller should emit a normal call,
2969 otherwise try to get the result in TARGET, if convenient (and in
2970 mode MODE if that's convenient). */
2971
2972 static rtx
2973 expand_builtin_memcpy (tree exp, rtx target)
2974 {
2975 if (!validate_arglist (exp,
2976 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2977 return NULL_RTX;
2978 else
2979 {
2980 tree dest = CALL_EXPR_ARG (exp, 0);
2981 tree src = CALL_EXPR_ARG (exp, 1);
2982 tree len = CALL_EXPR_ARG (exp, 2);
2983 return expand_builtin_memcpy_args (dest, src, len, target, exp);
2984 }
2985 }
2986
2987 /* Expand an instrumented call EXP to the memcpy builtin.
2988 Return NULL_RTX if we failed; the caller should emit a normal call,
2989 otherwise try to get the result in TARGET, if convenient (and in
2990 mode MODE if that's convenient). */
2991
2992 static rtx
2993 expand_builtin_memcpy_with_bounds (tree exp, rtx target)
2994 {
2995 if (!validate_arglist (exp,
2996 POINTER_TYPE, POINTER_BOUNDS_TYPE,
2997 POINTER_TYPE, POINTER_BOUNDS_TYPE,
2998 INTEGER_TYPE, VOID_TYPE))
2999 return NULL_RTX;
3000 else
3001 {
3002 tree dest = CALL_EXPR_ARG (exp, 0);
3003 tree src = CALL_EXPR_ARG (exp, 2);
3004 tree len = CALL_EXPR_ARG (exp, 4);
3005 rtx res = expand_builtin_memcpy_args (dest, src, len, target, exp);
3006
3007 /* Return src bounds with the result. */
3008 if (res)
3009 {
3010 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3011 expand_normal (CALL_EXPR_ARG (exp, 1)));
3012 res = chkp_join_splitted_slot (res, bnd);
3013 }
3014 return res;
3015 }
3016 }
3017
3018 /* Expand a call EXP to the mempcpy builtin.
3019 Return NULL_RTX if we failed; the caller should emit a normal call,
3020 otherwise try to get the result in TARGET, if convenient (and in
3021 mode MODE if that's convenient). If ENDP is 0 return the
3022 destination pointer, if ENDP is 1 return the end pointer ala
3023 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3024 stpcpy. */
3025
3026 static rtx
3027 expand_builtin_mempcpy (tree exp, rtx target, machine_mode mode)
3028 {
3029 if (!validate_arglist (exp,
3030 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3031 return NULL_RTX;
3032 else
3033 {
3034 tree dest = CALL_EXPR_ARG (exp, 0);
3035 tree src = CALL_EXPR_ARG (exp, 1);
3036 tree len = CALL_EXPR_ARG (exp, 2);
3037 return expand_builtin_mempcpy_args (dest, src, len,
3038 target, mode, /*endp=*/ 1,
3039 exp);
3040 }
3041 }
3042
3043 /* Expand an instrumented call EXP to the mempcpy builtin.
3044 Return NULL_RTX if we failed; the caller should emit a normal call,
3045 otherwise try to get the result in TARGET, if convenient (and in
3046 mode MODE if that's convenient). */
3047
3048 static rtx
3049 expand_builtin_mempcpy_with_bounds (tree exp, rtx target, machine_mode mode)
3050 {
3051 if (!validate_arglist (exp,
3052 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3053 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3054 INTEGER_TYPE, VOID_TYPE))
3055 return NULL_RTX;
3056 else
3057 {
3058 tree dest = CALL_EXPR_ARG (exp, 0);
3059 tree src = CALL_EXPR_ARG (exp, 2);
3060 tree len = CALL_EXPR_ARG (exp, 4);
3061 rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
3062 mode, 1, exp);
3063
3064 /* Return src bounds with the result. */
3065 if (res)
3066 {
3067 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3068 expand_normal (CALL_EXPR_ARG (exp, 1)));
3069 res = chkp_join_splitted_slot (res, bnd);
3070 }
3071 return res;
3072 }
3073 }
3074
3075 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3076 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3077 so that this can also be called without constructing an actual CALL_EXPR.
3078 The other arguments and return value are the same as for
3079 expand_builtin_mempcpy. */
3080
3081 static rtx
3082 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3083 rtx target, machine_mode mode, int endp,
3084 tree orig_exp)
3085 {
3086 tree fndecl = get_callee_fndecl (orig_exp);
3087
3088 /* If return value is ignored, transform mempcpy into memcpy. */
3089 if (target == const0_rtx
3090 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
3091 && builtin_decl_implicit_p (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP))
3092 {
3093 tree fn = builtin_decl_implicit (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP);
3094 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3095 dest, src, len);
3096 return expand_expr (result, target, mode, EXPAND_NORMAL);
3097 }
3098 else if (target == const0_rtx
3099 && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3100 {
3101 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3102 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3103 dest, src, len);
3104 return expand_expr (result, target, mode, EXPAND_NORMAL);
3105 }
3106 else
3107 {
3108 const char *src_str;
3109 unsigned int src_align = get_pointer_alignment (src);
3110 unsigned int dest_align = get_pointer_alignment (dest);
3111 rtx dest_mem, src_mem, len_rtx;
3112
3113 /* If either SRC or DEST is not a pointer type, don't do this
3114 operation in-line. */
3115 if (dest_align == 0 || src_align == 0)
3116 return NULL_RTX;
3117
3118 /* If LEN is not constant, call the normal function. */
3119 if (! tree_fits_uhwi_p (len))
3120 return NULL_RTX;
3121
3122 len_rtx = expand_normal (len);
3123 src_str = c_getstr (src);
3124
3125 /* If SRC is a string constant and block move would be done
3126 by pieces, we can avoid loading the string from memory
3127 and only store the computed constants. */
3128 if (src_str
3129 && CONST_INT_P (len_rtx)
3130 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3131 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3132 CONST_CAST (char *, src_str),
3133 dest_align, false))
3134 {
3135 dest_mem = get_memory_rtx (dest, len);
3136 set_mem_align (dest_mem, dest_align);
3137 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3138 builtin_memcpy_read_str,
3139 CONST_CAST (char *, src_str),
3140 dest_align, false, endp);
3141 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3142 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3143 return dest_mem;
3144 }
3145
3146 if (CONST_INT_P (len_rtx)
3147 && can_move_by_pieces (INTVAL (len_rtx),
3148 MIN (dest_align, src_align)))
3149 {
3150 dest_mem = get_memory_rtx (dest, len);
3151 set_mem_align (dest_mem, dest_align);
3152 src_mem = get_memory_rtx (src, len);
3153 set_mem_align (src_mem, src_align);
3154 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3155 MIN (dest_align, src_align), endp);
3156 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3157 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3158 return dest_mem;
3159 }
3160
3161 return NULL_RTX;
3162 }
3163 }
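
/* Illustrative example: mempcpy (d, s, n) returns D + N.  If the result
   is unused, the code above simply rewrites the call to memcpy; with a
   small constant N it is expanded by pieces and, because ENDP is 1, the
   value produced is the end pointer D + N rather than D.  */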
3164
3165 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3166 we failed; the caller should emit a normal call, otherwise try to
3167 get the result in TARGET, if convenient. If ENDP is 0 return the
3168 destination pointer, if ENDP is 1 return the end pointer ala
3169 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3170 stpcpy. */
3171
3172 static rtx
3173 expand_movstr (tree dest, tree src, rtx target, int endp)
3174 {
3175 struct expand_operand ops[3];
3176 rtx dest_mem;
3177 rtx src_mem;
3178
3179 if (!targetm.have_movstr ())
3180 return NULL_RTX;
3181
3182 dest_mem = get_memory_rtx (dest, NULL);
3183 src_mem = get_memory_rtx (src, NULL);
3184 if (!endp)
3185 {
3186 target = force_reg (Pmode, XEXP (dest_mem, 0));
3187 dest_mem = replace_equiv_address (dest_mem, target);
3188 }
3189
3190 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3191 create_fixed_operand (&ops[1], dest_mem);
3192 create_fixed_operand (&ops[2], src_mem);
3193 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3194 return NULL_RTX;
3195
3196 if (endp && target != const0_rtx)
3197 {
3198 target = ops[0].value;
3199 /* movstr is supposed to set end to the address of the NUL
3200 terminator. If the caller requested a mempcpy-like return value,
3201 adjust it. */
3202 if (endp == 1)
3203 {
3204 rtx tem = plus_constant (GET_MODE (target),
3205 gen_lowpart (GET_MODE (target), target), 1);
3206 emit_move_insn (target, force_operand (tem, NULL_RTX));
3207 }
3208 }
3209 return target;
3210 }
3211
3212 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3213 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3214 try to get the result in TARGET, if convenient (and in mode MODE if that's
3215 convenient). */
3216
3217 static rtx
3218 expand_builtin_strcpy (tree exp, rtx target)
3219 {
3220 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3221 {
3222 tree dest = CALL_EXPR_ARG (exp, 0);
3223 tree src = CALL_EXPR_ARG (exp, 1);
3224 return expand_builtin_strcpy_args (dest, src, target);
3225 }
3226 return NULL_RTX;
3227 }
3228
3229 /* Helper function to do the actual work for expand_builtin_strcpy. The
3230 arguments to the builtin_strcpy call DEST and SRC are broken out
3231 so that this can also be called without constructing an actual CALL_EXPR.
3232 The other arguments and return value are the same as for
3233 expand_builtin_strcpy. */
3234
3235 static rtx
3236 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3237 {
3238 return expand_movstr (dest, src, target, /*endp=*/0);
3239 }
3240
3241 /* Expand a call EXP to the stpcpy builtin.
3242 Return NULL_RTX if we failed; the caller should emit a normal call,
3243 otherwise try to get the result in TARGET, if convenient (and in
3244 mode MODE if that's convenient). */
3245
3246 static rtx
3247 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3248 {
3249 tree dst, src;
3250 location_t loc = EXPR_LOCATION (exp);
3251
3252 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3253 return NULL_RTX;
3254
3255 dst = CALL_EXPR_ARG (exp, 0);
3256 src = CALL_EXPR_ARG (exp, 1);
3257
3258 /* If return value is ignored, transform stpcpy into strcpy. */
3259 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3260 {
3261 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3262 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3263 return expand_expr (result, target, mode, EXPAND_NORMAL);
3264 }
3265 else
3266 {
3267 tree len, lenp1;
3268 rtx ret;
3269
3270 /* Ensure we get an actual string whose length can be evaluated at
3271 compile-time, not an expression containing a string. This is
3272 because the latter will potentially produce pessimized code
3273 when used to produce the return value. */
3274 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3275 return expand_movstr (dst, src, target, /*endp=*/2);
3276
3277 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3278 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3279 target, mode, /*endp=*/2,
3280 exp);
3281
3282 if (ret)
3283 return ret;
3284
3285 if (TREE_CODE (len) == INTEGER_CST)
3286 {
3287 rtx len_rtx = expand_normal (len);
3288
3289 if (CONST_INT_P (len_rtx))
3290 {
3291 ret = expand_builtin_strcpy_args (dst, src, target);
3292
3293 if (ret)
3294 {
3295 if (! target)
3296 {
3297 if (mode != VOIDmode)
3298 target = gen_reg_rtx (mode);
3299 else
3300 target = gen_reg_rtx (GET_MODE (ret));
3301 }
3302 if (GET_MODE (target) != GET_MODE (ret))
3303 ret = gen_lowpart (GET_MODE (target), ret);
3304
3305 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3306 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3307 gcc_assert (ret);
3308
3309 return target;
3310 }
3311 }
3312 }
3313
3314 return expand_movstr (dst, src, target, /*endp=*/2);
3315 }
3316 }
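
/* Illustrative example: stpcpy (d, "abc") copies four bytes and returns
   D + 3, a pointer to the copied NUL.  That is why the expansion above
   uses mempcpy with length strlen (src) + 1 and ENDP == 2 ("end pointer
   minus one"), or falls back to movstr when the source length is not
   known at compile time.  */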
3317
3318 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3319 bytes from constant string DATA + OFFSET and return it as target
3320 constant. */
3321
3322 rtx
3323 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3324 machine_mode mode)
3325 {
3326 const char *str = (const char *) data;
3327
3328 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3329 return const0_rtx;
3330
3331 return c_readstr (str + offset, mode);
3332 }
3333
3334 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3335 NULL_RTX if we failed; the caller should emit a normal call. */
3336
3337 static rtx
3338 expand_builtin_strncpy (tree exp, rtx target)
3339 {
3340 location_t loc = EXPR_LOCATION (exp);
3341
3342 if (validate_arglist (exp,
3343 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3344 {
3345 tree dest = CALL_EXPR_ARG (exp, 0);
3346 tree src = CALL_EXPR_ARG (exp, 1);
3347 tree len = CALL_EXPR_ARG (exp, 2);
3348 tree slen = c_strlen (src, 1);
3349
3350 /* We must be passed a constant len and src parameter. */
3351 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3352 return NULL_RTX;
3353
3354 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3355
3356 /* We're required to pad with trailing zeros if the requested
3357 len is greater than strlen(s2)+1. In that case try to
3358 use store_by_pieces; if that is not possible, punt. */
3359 if (tree_int_cst_lt (slen, len))
3360 {
3361 unsigned int dest_align = get_pointer_alignment (dest);
3362 const char *p = c_getstr (src);
3363 rtx dest_mem;
3364
3365 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3366 || !can_store_by_pieces (tree_to_uhwi (len),
3367 builtin_strncpy_read_str,
3368 CONST_CAST (char *, p),
3369 dest_align, false))
3370 return NULL_RTX;
3371
3372 dest_mem = get_memory_rtx (dest, len);
3373 store_by_pieces (dest_mem, tree_to_uhwi (len),
3374 builtin_strncpy_read_str,
3375 CONST_CAST (char *, p), dest_align, false, 0);
3376 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3377 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3378 return dest_mem;
3379 }
3380 }
3381 return NULL_RTX;
3382 }
3383
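/* For illustration, the store_by_pieces path above implements the
   zero-padding that strncpy requires when LEN exceeds strlen (SRC) + 1,
   e.g.

     strncpy (dst, "ab", 5);   // stores 'a', 'b', '\0', '\0', '\0'

   All other cases return NULL_RTX so the caller emits a normal call.  */
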
3384 /* Callback routine for store_by_pieces.  Read GET_MODE_SIZE (MODE)
3385 bytes from the constant string DATA + OFFSET and return it as a target
3386 constant. */
3387
3388 rtx
3389 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3390 machine_mode mode)
3391 {
3392 const char *c = (const char *) data;
3393 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3394
3395 memset (p, *c, GET_MODE_SIZE (mode));
3396
3397 return c_readstr (p, mode);
3398 }
3399
3400 /* Callback routine for store_by_pieces. Return the RTL of a register
3401 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3402 char value given in the RTL register data. For example, if mode is
3403 4 bytes wide, return the RTL for 0x01010101*data. */
3404
3405 static rtx
3406 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3407 machine_mode mode)
3408 {
3409 rtx target, coeff;
3410 size_t size;
3411 char *p;
3412
3413 size = GET_MODE_SIZE (mode);
3414 if (size == 1)
3415 return (rtx) data;
3416
3417 p = XALLOCAVEC (char, size);
3418 memset (p, 1, size);
3419 coeff = c_readstr (p, mode);
3420
3421 target = convert_to_mode (mode, (rtx) data, 1);
3422 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3423 return force_reg (mode, target);
3424 }
3425
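/* For illustration: unlike builtin_memset_read_str above, the byte value
   here is not known at compile time, so it is replicated at run time with
   a multiplication, e.g. for SImode

     0x0000005a * 0x01010101 == 0x5a5a5a5a

   (a rough sketch; wider modes use the analogous replication constant).  */
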
3426 /* Expand expression EXP, which is a call to the memset builtin. Return
3427 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3428 try to get the result in TARGET, if convenient (and in mode MODE if that's
3429 convenient). */
3430
3431 static rtx
3432 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
3433 {
3434 if (!validate_arglist (exp,
3435 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3436 return NULL_RTX;
3437 else
3438 {
3439 tree dest = CALL_EXPR_ARG (exp, 0);
3440 tree val = CALL_EXPR_ARG (exp, 1);
3441 tree len = CALL_EXPR_ARG (exp, 2);
3442 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3443 }
3444 }
3445
3446 /* Expand expression EXP, which is an instrumented call to the memset builtin.
3447 Return NULL_RTX if we failed; the caller should emit a normal call, otherwise
3448 try to get the result in TARGET, if convenient (and in mode MODE if that's
3449 convenient). */
3450
3451 static rtx
3452 expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
3453 {
3454 if (!validate_arglist (exp,
3455 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3456 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3457 return NULL_RTX;
3458 else
3459 {
3460 tree dest = CALL_EXPR_ARG (exp, 0);
3461 tree val = CALL_EXPR_ARG (exp, 2);
3462 tree len = CALL_EXPR_ARG (exp, 3);
3463 rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
3464
3465 /* Return src bounds with the result. */
3466 if (res)
3467 {
3468 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3469 expand_normal (CALL_EXPR_ARG (exp, 1)));
3470 res = chkp_join_splitted_slot (res, bnd);
3471 }
3472 return res;
3473 }
3474 }
3475
3476 /* Helper function to do the actual work for expand_builtin_memset. The
3477 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3478 so that this can also be called without constructing an actual CALL_EXPR.
3479 The other arguments and return value are the same as for
3480 expand_builtin_memset. */
3481
3482 static rtx
3483 expand_builtin_memset_args (tree dest, tree val, tree len,
3484 rtx target, machine_mode mode, tree orig_exp)
3485 {
3486 tree fndecl, fn;
3487 enum built_in_function fcode;
3488 machine_mode val_mode;
3489 char c;
3490 unsigned int dest_align;
3491 rtx dest_mem, dest_addr, len_rtx;
3492 HOST_WIDE_INT expected_size = -1;
3493 unsigned int expected_align = 0;
3494 unsigned HOST_WIDE_INT min_size;
3495 unsigned HOST_WIDE_INT max_size;
3496 unsigned HOST_WIDE_INT probable_max_size;
3497
3498 dest_align = get_pointer_alignment (dest);
3499
3500 /* If DEST is not a pointer type, don't do this operation in-line. */
3501 if (dest_align == 0)
3502 return NULL_RTX;
3503
3504 if (currently_expanding_gimple_stmt)
3505 stringop_block_profile (currently_expanding_gimple_stmt,
3506 &expected_align, &expected_size);
3507
3508 if (expected_align < dest_align)
3509 expected_align = dest_align;
3510
3511 /* If the LEN parameter is zero, return DEST. */
3512 if (integer_zerop (len))
3513 {
3514 /* Evaluate and ignore VAL in case it has side-effects. */
3515 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3516 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3517 }
3518
3519 /* Stabilize the arguments in case we fail. */
3520 dest = builtin_save_expr (dest);
3521 val = builtin_save_expr (val);
3522 len = builtin_save_expr (len);
3523
3524 len_rtx = expand_normal (len);
3525 determine_block_size (len, len_rtx, &min_size, &max_size,
3526 &probable_max_size);
3527 dest_mem = get_memory_rtx (dest, len);
3528 val_mode = TYPE_MODE (unsigned_char_type_node);
3529
3530 if (TREE_CODE (val) != INTEGER_CST)
3531 {
3532 rtx val_rtx;
3533
3534 val_rtx = expand_normal (val);
3535 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3536
3537 /* Assume that we can memset by pieces if we can store
3538 the coefficients by pieces (in the required modes).
3539 We can't pass builtin_memset_gen_str as that emits RTL. */
3540 c = 1;
3541 if (tree_fits_uhwi_p (len)
3542 && can_store_by_pieces (tree_to_uhwi (len),
3543 builtin_memset_read_str, &c, dest_align,
3544 true))
3545 {
3546 val_rtx = force_reg (val_mode, val_rtx);
3547 store_by_pieces (dest_mem, tree_to_uhwi (len),
3548 builtin_memset_gen_str, val_rtx, dest_align,
3549 true, 0);
3550 }
3551 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3552 dest_align, expected_align,
3553 expected_size, min_size, max_size,
3554 probable_max_size))
3555 goto do_libcall;
3556
3557 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3558 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3559 return dest_mem;
3560 }
3561
3562 if (target_char_cast (val, &c))
3563 goto do_libcall;
3564
3565 if (c)
3566 {
3567 if (tree_fits_uhwi_p (len)
3568 && can_store_by_pieces (tree_to_uhwi (len),
3569 builtin_memset_read_str, &c, dest_align,
3570 true))
3571 store_by_pieces (dest_mem, tree_to_uhwi (len),
3572 builtin_memset_read_str, &c, dest_align, true, 0);
3573 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3574 gen_int_mode (c, val_mode),
3575 dest_align, expected_align,
3576 expected_size, min_size, max_size,
3577 probable_max_size))
3578 goto do_libcall;
3579
3580 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3581 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3582 return dest_mem;
3583 }
3584
3585 set_mem_align (dest_mem, dest_align);
3586 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3587 CALL_EXPR_TAILCALL (orig_exp)
3588 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3589 expected_align, expected_size,
3590 min_size, max_size,
3591 probable_max_size);
3592
3593 if (dest_addr == 0)
3594 {
3595 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3596 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3597 }
3598
3599 return dest_addr;
3600
3601 do_libcall:
3602 fndecl = get_callee_fndecl (orig_exp);
3603 fcode = DECL_FUNCTION_CODE (fndecl);
3604 if (fcode == BUILT_IN_MEMSET
3605 || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
3606 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3607 dest, val, len);
3608 else if (fcode == BUILT_IN_BZERO)
3609 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3610 dest, len);
3611 else
3612 gcc_unreachable ();
3613 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3614 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3615 return expand_call (fn, target, target == const0_rtx);
3616 }
3617
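/* An illustrative summary of the strategies above (a rough sketch, not
   exact code generation).  A small constant length with a known nonzero
   byte value can degenerate into a few wide stores, e.g.

     memset (buf, 0x5a, 8)    // two SImode stores of 0x5a5a5a5a,
                              // or one DImode store

   a zero value goes through clear_storage_hints, and anything the
   target's setmem pattern cannot handle falls through to do_libcall.  */
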
3618 /* Expand expression EXP, which is a call to the bzero builtin. Return
3619 NULL_RTX if we failed; the caller should emit a normal call. */
3620
3621 static rtx
3622 expand_builtin_bzero (tree exp)
3623 {
3624 tree dest, size;
3625 location_t loc = EXPR_LOCATION (exp);
3626
3627 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3628 return NULL_RTX;
3629
3630 dest = CALL_EXPR_ARG (exp, 0);
3631 size = CALL_EXPR_ARG (exp, 1);
3632
3633 /* New argument list transforming bzero(ptr x, int y) to
3634 memset(ptr x, int 0, size_t y). This is done this way
3635 so that if it isn't expanded inline, we fall back to
3636 calling bzero instead of memset. */
3637
3638 return expand_builtin_memset_args (dest, integer_zero_node,
3639 fold_convert_loc (loc,
3640 size_type_node, size),
3641 const0_rtx, VOIDmode, exp);
3642 }
3643
3644 /* Try to expand cmpstr operation ICODE with the given operands.
3645 Return the result rtx on success, otherwise return null. */
3646
3647 static rtx
3648 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
3649 HOST_WIDE_INT align)
3650 {
3651 machine_mode insn_mode = insn_data[icode].operand[0].mode;
3652
3653 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
3654 target = NULL_RTX;
3655
3656 struct expand_operand ops[4];
3657 create_output_operand (&ops[0], target, insn_mode);
3658 create_fixed_operand (&ops[1], arg1_rtx);
3659 create_fixed_operand (&ops[2], arg2_rtx);
3660 create_integer_operand (&ops[3], align);
3661 if (maybe_expand_insn (icode, 4, ops))
3662 return ops[0].value;
3663 return NULL_RTX;
3664 }
3665
3666 /* Expand expression EXP, which is a call to the memcmp built-in function.
3667 Return NULL_RTX if we failed and the caller should emit a normal call,
3668 otherwise try to get the result in TARGET, if convenient.
3669 RESULT_EQ is true if we can relax the returned value to be either zero
3670 or nonzero, without caring about the sign. */
3671
3672 static rtx
3673 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
3674 {
3675 if (!validate_arglist (exp,
3676 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3677 return NULL_RTX;
3678
3679 tree arg1 = CALL_EXPR_ARG (exp, 0);
3680 tree arg2 = CALL_EXPR_ARG (exp, 1);
3681 tree len = CALL_EXPR_ARG (exp, 2);
3682 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
3683 location_t loc = EXPR_LOCATION (exp);
3684
3685 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3686 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3687
3688 /* If we can't determine the pointer alignments, emit a library call. */
3689 if (arg1_align == 0 || arg2_align == 0)
3690 return NULL_RTX;
3691
3692 rtx arg1_rtx = get_memory_rtx (arg1, len);
3693 rtx arg2_rtx = get_memory_rtx (arg2, len);
3694 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3695
3696 /* Set MEM_SIZE as appropriate. */
3697 if (CONST_INT_P (len_rtx))
3698 {
3699 set_mem_size (arg1_rtx, INTVAL (len_rtx));
3700 set_mem_size (arg2_rtx, INTVAL (len_rtx));
3701 }
3702
3703 by_pieces_constfn constfn = NULL;
3704
3705 const char *src_str = c_getstr (arg2);
3706 if (result_eq && src_str == NULL)
3707 {
3708 src_str = c_getstr (arg1);
3709 if (src_str != NULL)
3710 std::swap (arg1_rtx, arg2_rtx);
3711 }
3712
3713 /* If SRC is a string constant and the block comparison would be done
3714 by pieces, we can avoid loading the string from memory and instead
3715 compare directly against the computed constants. */
3716 if (src_str
3717 && CONST_INT_P (len_rtx)
3718 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
3719 constfn = builtin_memcpy_read_str;
3720
3721 rtx result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
3722 TREE_TYPE (len), target,
3723 result_eq, constfn,
3724 CONST_CAST (char *, src_str));
3725
3726 if (result)
3727 {
3728 /* Return the value in the proper mode for this function. */
3729 if (GET_MODE (result) == mode)
3730 return result;
3731
3732 if (target != 0)
3733 {
3734 convert_move (target, result, 0);
3735 return target;
3736 }
3737
3738 return convert_to_mode (mode, result, 0);
3739 }
3740
3741 return NULL_RTX;
3742 }
3743
3744 /* Expand expression EXP, which is a call to the strcmp builtin. Return
3745 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3746 try to get the result in TARGET, if convenient. */
3747
3748 static rtx
3749 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
3750 {
3751 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3752 return NULL_RTX;
3753
3754 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
3755 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
3756 if (cmpstr_icode != CODE_FOR_nothing || cmpstrn_icode != CODE_FOR_nothing)
3757 {
3758 rtx arg1_rtx, arg2_rtx;
3759 tree fndecl, fn;
3760 tree arg1 = CALL_EXPR_ARG (exp, 0);
3761 tree arg2 = CALL_EXPR_ARG (exp, 1);
3762 rtx result = NULL_RTX;
3763
3764 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3765 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3766
3767 /* If we can't determine the pointer alignments, emit a library call. */
3768 if (arg1_align == 0 || arg2_align == 0)
3769 return NULL_RTX;
3770
3771 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
3772 arg1 = builtin_save_expr (arg1);
3773 arg2 = builtin_save_expr (arg2);
3774
3775 arg1_rtx = get_memory_rtx (arg1, NULL);
3776 arg2_rtx = get_memory_rtx (arg2, NULL);
3777
3778 /* Try to call cmpstrsi. */
3779 if (cmpstr_icode != CODE_FOR_nothing)
3780 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
3781 MIN (arg1_align, arg2_align));
3782
3783 /* Try to determine at least one length and call cmpstrnsi. */
3784 if (!result && cmpstrn_icode != CODE_FOR_nothing)
3785 {
3786 tree len;
3787 rtx arg3_rtx;
3788
3789 tree len1 = c_strlen (arg1, 1);
3790 tree len2 = c_strlen (arg2, 1);
3791
3792 if (len1)
3793 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3794 if (len2)
3795 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3796
3797 /* If we don't have a constant length for the first, use the length
3798 of the second, if we know it. We don't require a constant for
3799 this case; some cost analysis could be done if both are available
3800 but neither is constant. For now, assume they're equally cheap,
3801 unless one has side effects. If both strings have constant lengths,
3802 use the smaller. */
3803
3804 if (!len1)
3805 len = len2;
3806 else if (!len2)
3807 len = len1;
3808 else if (TREE_SIDE_EFFECTS (len1))
3809 len = len2;
3810 else if (TREE_SIDE_EFFECTS (len2))
3811 len = len1;
3812 else if (TREE_CODE (len1) != INTEGER_CST)
3813 len = len2;
3814 else if (TREE_CODE (len2) != INTEGER_CST)
3815 len = len1;
3816 else if (tree_int_cst_lt (len1, len2))
3817 len = len1;
3818 else
3819 len = len2;
3820
3821 /* Only proceed if the chosen length is known and has no side effects. */
3822 if (len && !TREE_SIDE_EFFECTS (len))
3823 {
3824 arg3_rtx = expand_normal (len);
3825 result = expand_cmpstrn_or_cmpmem
3826 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
3827 arg3_rtx, MIN (arg1_align, arg2_align));
3828 }
3829 }
3830
3831 if (result)
3832 {
3833 /* Return the value in the proper mode for this function. */
3834 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
3835 if (GET_MODE (result) == mode)
3836 return result;
3837 if (target == 0)
3838 return convert_to_mode (mode, result, 0);
3839 convert_move (target, result, 0);
3840 return target;
3841 }
3842
3843 /* Expand the library call ourselves using a stabilized argument
3844 list to avoid re-evaluating the function's arguments twice. */
3845 fndecl = get_callee_fndecl (exp);
3846 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
3847 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3848 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
3849 return expand_call (fn, target, target == const0_rtx);
3850 }
3851 return NULL_RTX;
3852 }
3853
3854 /* Expand expression EXP, which is a call to the strncmp builtin. Return
3855 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3856 try to get the result in TARGET, if convenient. */
3857
3858 static rtx
3859 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3860 ATTRIBUTE_UNUSED machine_mode mode)
3861 {
3862 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3863
3864 if (!validate_arglist (exp,
3865 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3866 return NULL_RTX;
3867
3868 /* If c_strlen can determine an expression for one of the string
3869 lengths, and it doesn't have side effects, then emit cmpstrnsi
3870 using length MIN(strlen(string)+1, arg3). */
3871 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
3872 if (cmpstrn_icode != CODE_FOR_nothing)
3873 {
3874 tree len, len1, len2;
3875 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3876 rtx result;
3877 tree fndecl, fn;
3878 tree arg1 = CALL_EXPR_ARG (exp, 0);
3879 tree arg2 = CALL_EXPR_ARG (exp, 1);
3880 tree arg3 = CALL_EXPR_ARG (exp, 2);
3881
3882 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3883 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3884
3885 len1 = c_strlen (arg1, 1);
3886 len2 = c_strlen (arg2, 1);
3887
3888 if (len1)
3889 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
3890 if (len2)
3891 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
3892
3893 /* If we don't have a constant length for the first, use the length
3894 of the second, if we know it. We don't require a constant for
3895 this case; some cost analysis could be done if both are available
3896 but neither is constant. For now, assume they're equally cheap,
3897 unless one has side effects. If both strings have constant lengths,
3898 use the smaller. */
3899
3900 if (!len1)
3901 len = len2;
3902 else if (!len2)
3903 len = len1;
3904 else if (TREE_SIDE_EFFECTS (len1))
3905 len = len2;
3906 else if (TREE_SIDE_EFFECTS (len2))
3907 len = len1;
3908 else if (TREE_CODE (len1) != INTEGER_CST)
3909 len = len2;
3910 else if (TREE_CODE (len2) != INTEGER_CST)
3911 len = len1;
3912 else if (tree_int_cst_lt (len1, len2))
3913 len = len1;
3914 else
3915 len = len2;
3916
3917 /* If we could not determine a length, or it has side effects, punt. */
3918 if (!len || TREE_SIDE_EFFECTS (len))
3919 return NULL_RTX;
3920
3921 /* The actual new length parameter is MIN(len,arg3). */
3922 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
3923 fold_convert_loc (loc, TREE_TYPE (len), arg3));
3924
3925 /* If we can't determine the pointer alignments, emit a library call. */
3926 if (arg1_align == 0 || arg2_align == 0)
3927 return NULL_RTX;
3928
3929 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
3930 arg1 = builtin_save_expr (arg1);
3931 arg2 = builtin_save_expr (arg2);
3932 len = builtin_save_expr (len);
3933
3934 arg1_rtx = get_memory_rtx (arg1, len);
3935 arg2_rtx = get_memory_rtx (arg2, len);
3936 arg3_rtx = expand_normal (len);
3937 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
3938 arg2_rtx, TREE_TYPE (len), arg3_rtx,
3939 MIN (arg1_align, arg2_align));
3940 if (result)
3941 {
3942 /* Return the value in the proper mode for this function. */
3943 mode = TYPE_MODE (TREE_TYPE (exp));
3944 if (GET_MODE (result) == mode)
3945 return result;
3946 if (target == 0)
3947 return convert_to_mode (mode, result, 0);
3948 convert_move (target, result, 0);
3949 return target;
3950 }
3951
3952 /* Expand the library call ourselves using a stabilized argument
3953 list to avoid re-evaluating the function's arguments twice. */
3954 fndecl = get_callee_fndecl (exp);
3955 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
3956 arg1, arg2, len);
3957 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3958 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
3959 return expand_call (fn, target, target == const0_rtx);
3960 }
3961 return NULL_RTX;
3962 }
3963
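/* An example of the MIN (strlen (string) + 1, arg3) length computed
   above.  For

     strncmp (s, "abc", 100)

   the cmpstrn pattern is given length MIN (4, 100) == 4, since the
   comparison cannot continue past the terminating NUL of "abc".  */
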
3964 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
3965 if that's convenient. */
3966
3967 rtx
3968 expand_builtin_saveregs (void)
3969 {
3970 rtx val;
3971 rtx_insn *seq;
3972
3973 /* Don't do __builtin_saveregs more than once in a function.
3974 Save the result of the first call and reuse it. */
3975 if (saveregs_value != 0)
3976 return saveregs_value;
3977
3978 /* When this function is called, it means that registers must be
3979 saved on entry to this function. So we migrate the call to the
3980 first insn of this function. */
3981
3982 start_sequence ();
3983
3984 /* Do whatever the machine needs done in this case. */
3985 val = targetm.calls.expand_builtin_saveregs ();
3986
3987 seq = get_insns ();
3988 end_sequence ();
3989
3990 saveregs_value = val;
3991
3992 /* Put the insns after the NOTE that starts the function. If this
3993 is inside a start_sequence, make the outer-level insn chain current, so
3994 the code is placed at the start of the function. */
3995 push_topmost_sequence ();
3996 emit_insn_after (seq, entry_of_function ());
3997 pop_topmost_sequence ();
3998
3999 return val;
4000 }
4001
4002 /* Expand a call to __builtin_next_arg. */
4003
4004 static rtx
4005 expand_builtin_next_arg (void)
4006 {
4007 /* Checking arguments is already done in fold_builtin_next_arg,
4008 which must be called before this function. */
4009 return expand_binop (ptr_mode, add_optab,
4010 crtl->args.internal_arg_pointer,
4011 crtl->args.arg_offset_rtx,
4012 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4013 }
4014
4015 /* Make it easier for the backends by protecting the valist argument
4016 from multiple evaluations. */
4017
4018 static tree
4019 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4020 {
4021 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4022
4023 /* The current way of determining the type of valist is completely
4024 bogus. We should have the information on the va builtin instead. */
4025 if (!vatype)
4026 vatype = targetm.fn_abi_va_list (cfun->decl);
4027
4028 if (TREE_CODE (vatype) == ARRAY_TYPE)
4029 {
4030 if (TREE_SIDE_EFFECTS (valist))
4031 valist = save_expr (valist);
4032
4033 /* For this case, the backends will be expecting a pointer to
4034 vatype, but it's possible we've actually been given an array
4035 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4036 So fix it. */
4037 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4038 {
4039 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4040 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4041 }
4042 }
4043 else
4044 {
4045 tree pt = build_pointer_type (vatype);
4046
4047 if (! needs_lvalue)
4048 {
4049 if (! TREE_SIDE_EFFECTS (valist))
4050 return valist;
4051
4052 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4053 TREE_SIDE_EFFECTS (valist) = 1;
4054 }
4055
4056 if (TREE_SIDE_EFFECTS (valist))
4057 valist = save_expr (valist);
4058 valist = fold_build2_loc (loc, MEM_REF,
4059 vatype, valist, build_int_cst (pt, 0));
4060 }
4061
4062 return valist;
4063 }
4064
4065 /* The "standard" definition of va_list is void*. */
4066
4067 tree
4068 std_build_builtin_va_list (void)
4069 {
4070 return ptr_type_node;
4071 }
4072
4073 /* The "standard" abi va_list is va_list_type_node. */
4074
4075 tree
4076 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4077 {
4078 return va_list_type_node;
4079 }
4080
4081 /* The "standard" type of va_list is va_list_type_node. */
4082
4083 tree
4084 std_canonical_va_list_type (tree type)
4085 {
4086 tree wtype, htype;
4087
4088 wtype = va_list_type_node;
4089 htype = type;
4090
4091 if (TREE_CODE (wtype) == ARRAY_TYPE)
4092 {
4093 /* If va_list is an array type, the argument may have decayed
4094 to a pointer type, e.g. by being passed to another function.
4095 In that case, unwrap both types so that we can compare the
4096 underlying records. */
4097 if (TREE_CODE (htype) == ARRAY_TYPE
4098 || POINTER_TYPE_P (htype))
4099 {
4100 wtype = TREE_TYPE (wtype);
4101 htype = TREE_TYPE (htype);
4102 }
4103 }
4104 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4105 return va_list_type_node;
4106
4107 return NULL_TREE;
4108 }
4109
4110 /* The "standard" implementation of va_start: just assign `nextarg' to
4111 the variable. */
4112
4113 void
4114 std_expand_builtin_va_start (tree valist, rtx nextarg)
4115 {
4116 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4117 convert_move (va_r, nextarg, 0);
4118
4119 /* We do not have any valid bounds for the pointer, so
4120 just store zero bounds for it. */
4121 if (chkp_function_instrumented_p (current_function_decl))
4122 chkp_expand_bounds_reset_for_mem (valist,
4123 make_tree (TREE_TYPE (valist),
4124 nextarg));
4125 }
4126
4127 /* Expand EXP, a call to __builtin_va_start. */
4128
4129 static rtx
4130 expand_builtin_va_start (tree exp)
4131 {
4132 rtx nextarg;
4133 tree valist;
4134 location_t loc = EXPR_LOCATION (exp);
4135
4136 if (call_expr_nargs (exp) < 2)
4137 {
4138 error_at (loc, "too few arguments to function %<va_start%>");
4139 return const0_rtx;
4140 }
4141
4142 if (fold_builtin_next_arg (exp, true))
4143 return const0_rtx;
4144
4145 nextarg = expand_builtin_next_arg ();
4146 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4147
4148 if (targetm.expand_builtin_va_start)
4149 targetm.expand_builtin_va_start (valist, nextarg);
4150 else
4151 std_expand_builtin_va_start (valist, nextarg);
4152
4153 return const0_rtx;
4154 }
4155
4156 /* Expand EXP, a call to __builtin_va_end. */
4157
4158 static rtx
4159 expand_builtin_va_end (tree exp)
4160 {
4161 tree valist = CALL_EXPR_ARG (exp, 0);
4162
4163 /* Evaluate for side effects, if needed. I hate macros that don't
4164 do that. */
4165 if (TREE_SIDE_EFFECTS (valist))
4166 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4167
4168 return const0_rtx;
4169 }
4170
4171 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4172 builtin rather than just as an assignment in stdarg.h because of the
4173 nastiness of array-type va_list types. */
4174
4175 static rtx
4176 expand_builtin_va_copy (tree exp)
4177 {
4178 tree dst, src, t;
4179 location_t loc = EXPR_LOCATION (exp);
4180
4181 dst = CALL_EXPR_ARG (exp, 0);
4182 src = CALL_EXPR_ARG (exp, 1);
4183
4184 dst = stabilize_va_list_loc (loc, dst, 1);
4185 src = stabilize_va_list_loc (loc, src, 0);
4186
4187 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4188
4189 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4190 {
4191 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4192 TREE_SIDE_EFFECTS (t) = 1;
4193 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4194 }
4195 else
4196 {
4197 rtx dstb, srcb, size;
4198
4199 /* Evaluate to pointers. */
4200 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4201 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4202 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4203 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4204
4205 dstb = convert_memory_address (Pmode, dstb);
4206 srcb = convert_memory_address (Pmode, srcb);
4207
4208 /* "Dereference" to BLKmode memories. */
4209 dstb = gen_rtx_MEM (BLKmode, dstb);
4210 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4211 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4212 srcb = gen_rtx_MEM (BLKmode, srcb);
4213 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4214 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4215
4216 /* Copy. */
4217 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4218 }
4219
4220 return const0_rtx;
4221 }
4222
4223 /* Expand a call to one of the builtin functions __builtin_frame_address or
4224 __builtin_return_address. */
4225
4226 static rtx
4227 expand_builtin_frame_address (tree fndecl, tree exp)
4228 {
4229 /* The argument must be a nonnegative integer constant.
4230 It counts the number of frames to scan up the stack.
4231 The value is either the frame pointer value or the return
4232 address saved in that frame. */
4233 if (call_expr_nargs (exp) == 0)
4234 /* Warning about missing arg was already issued. */
4235 return const0_rtx;
4236 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4237 {
4238 error ("invalid argument to %qD", fndecl);
4239 return const0_rtx;
4240 }
4241 else
4242 {
4243 /* Number of frames to scan up the stack. */
4244 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
4245
4246 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
4247
4248 /* Some ports cannot access arbitrary stack frames. */
4249 if (tem == NULL)
4250 {
4251 warning (0, "unsupported argument to %qD", fndecl);
4252 return const0_rtx;
4253 }
4254
4255 if (count)
4256 {
4257 /* Warn since no effort is made to ensure that any frame
4258 beyond the current one exists or can be safely reached. */
4259 warning (OPT_Wframe_address, "calling %qD with "
4260 "a nonzero argument is unsafe", fndecl);
4261 }
4262
4263 /* For __builtin_frame_address, return what we've got. */
4264 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4265 return tem;
4266
4267 if (!REG_P (tem)
4268 && ! CONSTANT_P (tem))
4269 tem = copy_addr_to_reg (tem);
4270 return tem;
4271 }
4272 }
4273
4274 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4275 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4276 is the same as for allocate_dynamic_stack_space. */
4277
4278 static rtx
4279 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4280 {
4281 rtx op0;
4282 rtx result;
4283 bool valid_arglist;
4284 unsigned int align;
4285 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4286 == BUILT_IN_ALLOCA_WITH_ALIGN);
4287
4288 valid_arglist
4289 = (alloca_with_align
4290 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4291 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4292
4293 if (!valid_arglist)
4294 return NULL_RTX;
4295
4296 /* Compute the argument. */
4297 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4298
4299 /* Compute the alignment. */
4300 align = (alloca_with_align
4301 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4302 : BIGGEST_ALIGNMENT);
4303
4304 /* Allocate the desired space. */
4305 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4306 result = convert_memory_address (ptr_mode, result);
4307
4308 return result;
4309 }
4310
4311 /* Expand a call to bswap builtin in EXP.
4312 Return NULL_RTX if a normal call should be emitted rather than expanding the
4313 function in-line. If convenient, the result should be placed in TARGET.
4314 SUBTARGET may be used as the target for computing one of EXP's operands. */
4315
4316 static rtx
4317 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
4318 rtx subtarget)
4319 {
4320 tree arg;
4321 rtx op0;
4322
4323 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4324 return NULL_RTX;
4325
4326 arg = CALL_EXPR_ARG (exp, 0);
4327 op0 = expand_expr (arg,
4328 subtarget && GET_MODE (subtarget) == target_mode
4329 ? subtarget : NULL_RTX,
4330 target_mode, EXPAND_NORMAL);
4331 if (GET_MODE (op0) != target_mode)
4332 op0 = convert_to_mode (target_mode, op0, 1);
4333
4334 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4335
4336 gcc_assert (target);
4337
4338 return convert_to_mode (target_mode, target, 1);
4339 }
4340
4341 /* Expand a call to a unary builtin in EXP.
4342 Return NULL_RTX if a normal call should be emitted rather than expanding the
4343 function in-line. If convenient, the result should be placed in TARGET.
4344 SUBTARGET may be used as the target for computing one of EXP's operands. */
4345
4346 static rtx
4347 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
4348 rtx subtarget, optab op_optab)
4349 {
4350 rtx op0;
4351
4352 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4353 return NULL_RTX;
4354
4355 /* Compute the argument. */
4356 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4357 (subtarget
4358 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4359 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4360 VOIDmode, EXPAND_NORMAL);
4361 /* Compute op, into TARGET if possible.
4362 Set TARGET to wherever the result comes back. */
4363 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4364 op_optab, op0, target, op_optab != clrsb_optab);
4365 gcc_assert (target);
4366
4367 return convert_to_mode (target_mode, target, 0);
4368 }
4369
4370 /* Expand a call to __builtin_expect. We just return our argument
4371 as the builtin_expect semantics should already have been handled by
4372 the tree branch prediction pass. */
4373
4374 static rtx
4375 expand_builtin_expect (tree exp, rtx target)
4376 {
4377 tree arg;
4378
4379 if (call_expr_nargs (exp) < 2)
4380 return const0_rtx;
4381 arg = CALL_EXPR_ARG (exp, 0);
4382
4383 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4384 /* When guessing was done, the hints should be already stripped away. */
4385 gcc_assert (!flag_guess_branch_prob
4386 || optimize == 0 || seen_error ());
4387 return target;
4388 }
4389
4390 /* Expand a call to __builtin_assume_aligned. We just return our first
4391 argument, as the builtin_assume_aligned semantics should already have
4392 been handled by CCP. */
4393
4394 static rtx
4395 expand_builtin_assume_aligned (tree exp, rtx target)
4396 {
4397 if (call_expr_nargs (exp) < 2)
4398 return const0_rtx;
4399 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4400 EXPAND_NORMAL);
4401 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4402 && (call_expr_nargs (exp) < 3
4403 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4404 return target;
4405 }
4406
4407 void
4408 expand_builtin_trap (void)
4409 {
4410 if (targetm.have_trap ())
4411 {
4412 rtx_insn *insn = emit_insn (targetm.gen_trap ());
4413 /* For trap insns, when not accumulating outgoing args, force a
4414 REG_ARGS_SIZE note to prevent crossjumping of calls with
4415 different arg sizes. */
4416 if (!ACCUMULATE_OUTGOING_ARGS)
4417 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4418 }
4419 else
4420 {
4421 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
4422 tree call_expr = build_call_expr (fn, 0);
4423 expand_call (call_expr, NULL_RTX, false);
4424 }
4425
4426 emit_barrier ();
4427 }
4428
4429 /* Expand a call to __builtin_unreachable. We do nothing except emit
4430 a barrier saying that control flow will not pass here.
4431
4432 It is the responsibility of the program being compiled to ensure
4433 that control flow never reaches __builtin_unreachable. */
4434 static void
4435 expand_builtin_unreachable (void)
4436 {
4437 emit_barrier ();
4438 }
4439
4440 /* Expand EXP, a call to fabs, fabsf or fabsl.
4441 Return NULL_RTX if a normal call should be emitted rather than expanding
4442 the function inline. If convenient, the result should be placed
4443 in TARGET. SUBTARGET may be used as the target for computing
4444 the operand. */
4445
4446 static rtx
4447 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4448 {
4449 machine_mode mode;
4450 tree arg;
4451 rtx op0;
4452
4453 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4454 return NULL_RTX;
4455
4456 arg = CALL_EXPR_ARG (exp, 0);
4457 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4458 mode = TYPE_MODE (TREE_TYPE (arg));
4459 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4460 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4461 }
4462
4463 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4464 Return NULL if a normal call should be emitted rather than expanding the
4465 function inline. If convenient, the result should be placed in TARGET.
4466 SUBTARGET may be used as the target for computing the operand. */
4467
4468 static rtx
4469 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4470 {
4471 rtx op0, op1;
4472 tree arg;
4473
4474 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4475 return NULL_RTX;
4476
4477 arg = CALL_EXPR_ARG (exp, 0);
4478 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4479
4480 arg = CALL_EXPR_ARG (exp, 1);
4481 op1 = expand_normal (arg);
4482
4483 return expand_copysign (op0, op1, target);
4484 }
4485
4486 /* Expand a call to __builtin___clear_cache. */
4487
4488 static rtx
4489 expand_builtin___clear_cache (tree exp)
4490 {
4491 if (!targetm.code_for_clear_cache)
4492 {
4493 #ifdef CLEAR_INSN_CACHE
4494 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4495 does something. Just do the default expansion to a call to
4496 __clear_cache(). */
4497 return NULL_RTX;
4498 #else
4499 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4500 does nothing. There is no need to call it. Do nothing. */
4501 return const0_rtx;
4502 #endif /* CLEAR_INSN_CACHE */
4503 }
4504
4505 /* We have a "clear_cache" insn, and it will handle everything. */
4506 tree begin, end;
4507 rtx begin_rtx, end_rtx;
4508
4509 /* We must not expand to a library call. If we did, any
4510 fallback library function in libgcc that might contain a call to
4511 __builtin___clear_cache() would recurse infinitely. */
4512 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4513 {
4514 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4515 return const0_rtx;
4516 }
4517
4518 if (targetm.have_clear_cache ())
4519 {
4520 struct expand_operand ops[2];
4521
4522 begin = CALL_EXPR_ARG (exp, 0);
4523 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4524
4525 end = CALL_EXPR_ARG (exp, 1);
4526 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4527
4528 create_address_operand (&ops[0], begin_rtx);
4529 create_address_operand (&ops[1], end_rtx);
4530 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
4531 return const0_rtx;
4532 }
4533 return const0_rtx;
4534 }
4535
4536 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4537
4538 static rtx
4539 round_trampoline_addr (rtx tramp)
4540 {
4541 rtx temp, addend, mask;
4542
4543 /* If we don't need too much alignment, we'll have been guaranteed
4544 proper alignment by get_trampoline_type. */
4545 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4546 return tramp;
4547
4548 /* Round address up to desired boundary. */
4549 temp = gen_reg_rtx (Pmode);
4550 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4551 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4552
4553 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4554 temp, 0, OPTAB_LIB_WIDEN);
4555 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4556 temp, 0, OPTAB_LIB_WIDEN);
4557
4558 return tramp;
4559 }
4560
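/* For illustration, the two binops above form the usual align-up idiom.
   With TRAMPOLINE_ALIGNMENT of 64 bits (8 bytes) they compute

     rounded = (tramp + 7) & -8;

   so e.g. an address of 0x1003 rounds up to 0x1008.  */
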
4561 static rtx
4562 expand_builtin_init_trampoline (tree exp, bool onstack)
4563 {
4564 tree t_tramp, t_func, t_chain;
4565 rtx m_tramp, r_tramp, r_chain, tmp;
4566
4567 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4568 POINTER_TYPE, VOID_TYPE))
4569 return NULL_RTX;
4570
4571 t_tramp = CALL_EXPR_ARG (exp, 0);
4572 t_func = CALL_EXPR_ARG (exp, 1);
4573 t_chain = CALL_EXPR_ARG (exp, 2);
4574
4575 r_tramp = expand_normal (t_tramp);
4576 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4577 MEM_NOTRAP_P (m_tramp) = 1;
4578
4579 /* If ONSTACK, the TRAMP argument should be the address of a field
4580 within the local function's FRAME decl. Either way, let's see if
4581 we can fill in the MEM_ATTRs for this memory. */
4582 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4583 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
4584
4585 /* Creator of a heap trampoline is responsible for making sure the
4586 address is aligned to at least STACK_BOUNDARY. Normally malloc
4587 will ensure this anyhow. */
4588 tmp = round_trampoline_addr (r_tramp);
4589 if (tmp != r_tramp)
4590 {
4591 m_tramp = change_address (m_tramp, BLKmode, tmp);
4592 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4593 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4594 }
4595
4596 /* The FUNC argument should be the address of the nested function.
4597 Extract the actual function decl to pass to the hook. */
4598 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4599 t_func = TREE_OPERAND (t_func, 0);
4600 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4601
4602 r_chain = expand_normal (t_chain);
4603
4604 /* Generate insns to initialize the trampoline. */
4605 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4606
4607 if (onstack)
4608 {
4609 trampolines_created = 1;
4610
4611 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4612 "trampoline generated for nested function %qD", t_func);
4613 }
4614
4615 return const0_rtx;
4616 }
4617
4618 static rtx
4619 expand_builtin_adjust_trampoline (tree exp)
4620 {
4621 rtx tramp;
4622
4623 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4624 return NULL_RTX;
4625
4626 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4627 tramp = round_trampoline_addr (tramp);
4628 if (targetm.calls.trampoline_adjust_address)
4629 tramp = targetm.calls.trampoline_adjust_address (tramp);
4630
4631 return tramp;
4632 }
4633
4634 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4635 function. The function first checks whether the back end provides
4636 an insn to implement signbit for the respective mode. If not, it
4637 checks whether the floating point format of the value is such that
4638 the sign bit can be extracted. If that is not the case, error out.
4639 EXP is the expression that is a call to the builtin function; if
4640 convenient, the result should be placed in TARGET. */
4641 static rtx
4642 expand_builtin_signbit (tree exp, rtx target)
4643 {
4644 const struct real_format *fmt;
4645 machine_mode fmode, imode, rmode;
4646 tree arg;
4647 int word, bitpos;
4648 enum insn_code icode;
4649 rtx temp;
4650 location_t loc = EXPR_LOCATION (exp);
4651
4652 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4653 return NULL_RTX;
4654
4655 arg = CALL_EXPR_ARG (exp, 0);
4656 fmode = TYPE_MODE (TREE_TYPE (arg));
4657 rmode = TYPE_MODE (TREE_TYPE (exp));
4658 fmt = REAL_MODE_FORMAT (fmode);
4659
4660 arg = builtin_save_expr (arg);
4661
4662 /* Expand the argument yielding a RTX expression. */
4663 temp = expand_normal (arg);
4664
4665 /* Check if the back end provides an insn that handles signbit for the
4666 argument's mode. */
4667 icode = optab_handler (signbit_optab, fmode);
4668 if (icode != CODE_FOR_nothing)
4669 {
4670 rtx_insn *last = get_last_insn ();
4671 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4672 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4673 return target;
4674 delete_insns_since (last);
4675 }
4676
4677 /* For floating point formats without a sign bit, implement signbit
4678 as "ARG < 0.0". */
4679 bitpos = fmt->signbit_ro;
4680 if (bitpos < 0)
4681 {
4682 /* But we can't do this if the format supports signed zero. */
4683 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
4684
4685 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
4686 build_real (TREE_TYPE (arg), dconst0));
4687 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4688 }
4689
4690 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
4691 {
4692 imode = int_mode_for_mode (fmode);
4693 gcc_assert (imode != BLKmode);
4694 temp = gen_lowpart (imode, temp);
4695 }
4696 else
4697 {
4698 imode = word_mode;
4699 /* Handle targets with different FP word orders. */
4700 if (FLOAT_WORDS_BIG_ENDIAN)
4701 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
4702 else
4703 word = bitpos / BITS_PER_WORD;
4704 temp = operand_subword_force (temp, word, fmode);
4705 bitpos = bitpos % BITS_PER_WORD;
4706 }
4707
4708 /* Force the intermediate word_mode (or narrower) result into a
4709 register. This avoids attempting to create paradoxical SUBREGs
4710 of floating point modes below. */
4711 temp = force_reg (imode, temp);
4712
4713 /* If the bitpos is within the "result mode" lowpart, the operation
4714 can be implemented with a single bitwise AND. Otherwise, we need
4715 a right shift and an AND. */
4716
4717 if (bitpos < GET_MODE_BITSIZE (rmode))
4718 {
4719 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
4720
4721 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
4722 temp = gen_lowpart (rmode, temp);
4723 temp = expand_binop (rmode, and_optab, temp,
4724 immed_wide_int_const (mask, rmode),
4725 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4726 }
4727 else
4728 {
4729 /* Perform a logical right shift to place the signbit in the least
4730 significant bit, then truncate the result to the desired mode
4731 and mask just this bit. */
4732 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
4733 temp = gen_lowpart (rmode, temp);
4734 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
4735 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4736 }
4737
4738 return temp;
4739 }
4740
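/* A rough sketch of the fallback path above for IEEE double on a typical
   64-bit target (bitpos 63, which lies outside the result mode):

     signbit (x)  ~=  (int) ((rep >> 63) & 1)   // rep = 64-bit image of x

   whereas a sign bit that already lies inside the result mode's lowpart
   needs only a single AND with a one-bit mask.  */
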
4741 /* Expand fork or exec calls. TARGET is the desired target of the
4742 call, EXP is the call, and FN is the decl of the builtin
4743 function being expanded. IGNORE is nonzero if the value is to be
4744 ignored. */
4745
4746 static rtx
4747 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
4748 {
4749 tree id, decl;
4750 tree call;
4751
4752 /* If we are not profiling, just call the function. */
4753 if (!profile_arc_flag)
4754 return NULL_RTX;
4755
4756 /* Otherwise call the wrapper. This should be equivalent for the rest of
4757 the compiler, so the code does not diverge, and the wrapper may run the
4758 code necessary for keeping the profiling sane. */
4759
4760 switch (DECL_FUNCTION_CODE (fn))
4761 {
4762 case BUILT_IN_FORK:
4763 id = get_identifier ("__gcov_fork");
4764 break;
4765
4766 case BUILT_IN_EXECL:
4767 id = get_identifier ("__gcov_execl");
4768 break;
4769
4770 case BUILT_IN_EXECV:
4771 id = get_identifier ("__gcov_execv");
4772 break;
4773
4774 case BUILT_IN_EXECLP:
4775 id = get_identifier ("__gcov_execlp");
4776 break;
4777
4778 case BUILT_IN_EXECLE:
4779 id = get_identifier ("__gcov_execle");
4780 break;
4781
4782 case BUILT_IN_EXECVP:
4783 id = get_identifier ("__gcov_execvp");
4784 break;
4785
4786 case BUILT_IN_EXECVE:
4787 id = get_identifier ("__gcov_execve");
4788 break;
4789
4790 default:
4791 gcc_unreachable ();
4792 }
4793
4794 decl = build_decl (DECL_SOURCE_LOCATION (fn),
4795 FUNCTION_DECL, id, TREE_TYPE (fn));
4796 DECL_EXTERNAL (decl) = 1;
4797 TREE_PUBLIC (decl) = 1;
4798 DECL_ARTIFICIAL (decl) = 1;
4799 TREE_NOTHROW (decl) = 1;
4800 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
4801 DECL_VISIBILITY_SPECIFIED (decl) = 1;
4802 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
4803 return expand_call (call, target, ignore);
4804 }
4805
4806
4807 \f
4808 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
4809 the pointer in these functions is void*, the tree optimizers may remove
4810 casts. The mode computed in expand_builtin isn't reliable either, due
4811 to __sync_bool_compare_and_swap.
4812
4813 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
4814 group of builtins. This gives us log2 of the mode size. */
4815
4816 static inline machine_mode
4817 get_builtin_sync_mode (int fcode_diff)
4818 {
4819 /* The size is not negotiable, so ask not to get BLKmode in return
4820 if the target indicates that a smaller size would be better. */
4821 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
4822 }
4823
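/* For illustration, FCODE_DIFF is log2 of the access size, so on typical
   targets the mapping above is

     0 -> QImode (1 byte), 1 -> HImode (2), 2 -> SImode (4),
     3 -> DImode (8),      4 -> TImode (16).  */
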
4824 /* Expand the memory expression LOC and return the appropriate memory operand
4825 for the builtin_sync operations. */
4826
4827 static rtx
4828 get_builtin_sync_mem (tree loc, machine_mode mode)
4829 {
4830 rtx addr, mem;
4831
4832 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
4833 addr = convert_memory_address (Pmode, addr);
4834
4835 /* Note that we explicitly do not want any alias information for this
4836 memory, so that we kill all other live memories. Otherwise we don't
4837 satisfy the full barrier semantics of the intrinsic. */
4838 mem = validize_mem (gen_rtx_MEM (mode, addr));
4839
4840 /* The alignment needs to be at least that of the mode. */
4841 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
4842 get_pointer_alignment (loc)));
4843 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
4844 MEM_VOLATILE_P (mem) = 1;
4845
4846 return mem;
4847 }
4848
4849 /* Make sure an argument is in the right mode.
4850 EXP is the tree argument.
4851 MODE is the mode it should be in. */
4852
4853 static rtx
4854 expand_expr_force_mode (tree exp, machine_mode mode)
4855 {
4856 rtx val;
4857 machine_mode old_mode;
4858
4859 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
4860 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
4861 of CONST_INTs, where we know the old_mode only from the call argument. */
4862
4863 old_mode = GET_MODE (val);
4864 if (old_mode == VOIDmode)
4865 old_mode = TYPE_MODE (TREE_TYPE (exp));
4866 val = convert_modes (mode, old_mode, val, 1);
4867 return val;
4868 }
4869
4870
4871 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
4872 EXP is the CALL_EXPR. CODE is the rtx code
4873 that corresponds to the arithmetic or logical operation from the name;
4874 an exception here is that NOT actually means NAND. TARGET is an optional
4875 place for us to store the results; AFTER is true if this is the
4876 xxx_and_fetch form (i.e. the value after the operation is returned). */
4877
4878 static rtx
4879 expand_builtin_sync_operation (machine_mode mode, tree exp,
4880 enum rtx_code code, bool after,
4881 rtx target)
4882 {
4883 rtx val, mem;
4884 location_t loc = EXPR_LOCATION (exp);
4885
4886 if (code == NOT && warn_sync_nand)
4887 {
4888 tree fndecl = get_callee_fndecl (exp);
4889 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
4890
4891 static bool warned_f_a_n, warned_n_a_f;
4892
4893 switch (fcode)
4894 {
4895 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
4896 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
4897 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
4898 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
4899 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
4900 if (warned_f_a_n)
4901 break;
4902
4903 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
4904 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
4905 warned_f_a_n = true;
4906 break;
4907
4908 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
4909 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
4910 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
4911 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
4912 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
4913 if (warned_n_a_f)
4914 break;
4915
4916 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
4917 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
4918 warned_n_a_f = true;
4919 break;
4920
4921 default:
4922 gcc_unreachable ();
4923 }
4924 }
4925
4926 /* Expand the operands. */
4927 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
4928 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
4929
4930 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
4931 after);
4932 }
4933
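/* For illustration, with CODE == NOT the expansion above implements the
   GCC 4.4 NAND semantics mentioned in the warning, roughly

     old = __sync_fetch_and_nand (p, v);   // *p = ~(old & v); returns old

   and AFTER selects whether the value before or after the operation is
   returned.  */
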
4934 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
4935 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
4936 true if this is the boolean form. TARGET is a place for us to store the
4937 results; this is NOT optional if IS_BOOL is true. */
4938
4939 static rtx
4940 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
4941 bool is_bool, rtx target)
4942 {
4943 rtx old_val, new_val, mem;
4944 rtx *pbool, *poval;
4945
4946 /* Expand the operands. */
4947 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
4948 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
4949 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
4950
4951 pbool = poval = NULL;
4952 if (target != const0_rtx)
4953 {
4954 if (is_bool)
4955 pbool = &target;
4956 else
4957 poval = &target;
4958 }
4959 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
4960 false, MEMMODEL_SYNC_SEQ_CST,
4961 MEMMODEL_SYNC_SEQ_CST))
4962 return NULL_RTX;
4963
4964 return target;
4965 }
4966
4967 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
4968 general form is actually an atomic exchange, and some targets only
4969 support a reduced form with the second argument being a constant 1.
4970 EXP is the CALL_EXPR; TARGET is an optional place for us to store
4971 the results. */
4972
4973 static rtx
4974 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
4975 rtx target)
4976 {
4977 rtx val, mem;
4978
4979 /* Expand the operands. */
4980 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
4981 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
4982
4983 return expand_sync_lock_test_and_set (target, mem, val);
4984 }
4985
4986 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
4987
4988 static void
4989 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
4990 {
4991 rtx mem;
4992
4993 /* Expand the operands. */
4994 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
4995
4996 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
4997 }
4998
4999 /* Given an integer representing an ``enum memmodel'', verify its
5000 correctness and return the memory model enum. */
5001
5002 static enum memmodel
5003 get_memmodel (tree exp)
5004 {
5005 rtx op;
5006 unsigned HOST_WIDE_INT val;
5007 source_location loc
5008 = expansion_point_location_if_in_system_header (input_location);
5009
5010 /* If the parameter is not a constant, it's a run time value so we'll just
5011 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5012 if (TREE_CODE (exp) != INTEGER_CST)
5013 return MEMMODEL_SEQ_CST;
5014
5015 op = expand_normal (exp);
5016
5017 val = INTVAL (op);
5018 if (targetm.memmodel_check)
5019 val = targetm.memmodel_check (val);
5020 else if (val & ~MEMMODEL_MASK)
5021 {
5022 warning_at (loc, OPT_Winvalid_memory_model,
5023 "unknown architecture specifier in memory model to builtin");
5024 return MEMMODEL_SEQ_CST;
5025 }
5026
5027 /* We should never see an explicit SYNC memory model here, so >= LAST works. */
5028 if (memmodel_base (val) >= MEMMODEL_LAST)
5029 {
5030 warning_at (loc, OPT_Winvalid_memory_model,
5031 "invalid memory model argument to builtin");
5032 return MEMMODEL_SEQ_CST;
5033 }
5034
5035 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5036 be conservative and promote consume to acquire. */
5037 if (val == MEMMODEL_CONSUME)
5038 val = MEMMODEL_ACQUIRE;
5039
5040 return (enum memmodel) val;
5041 }
5042
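/* For illustration, the net effect of the checks above is that, e.g.,

     __atomic_load_n (p, __ATOMIC_CONSUME)

   is treated as __ATOMIC_ACQUIRE (PR 59448), while a non-constant or
   out-of-range model argument degrades to __ATOMIC_SEQ_CST (with a
   warning for out-of-range constants).  */
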
5043 /* Expand the __atomic_exchange intrinsic:
5044 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5045 EXP is the CALL_EXPR.
5046 TARGET is an optional place for us to store the results. */
5047
5048 static rtx
5049 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5050 {
5051 rtx val, mem;
5052 enum memmodel model;
5053
5054 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5055
5056 if (!flag_inline_atomics)
5057 return NULL_RTX;
5058
5059 /* Expand the operands. */
5060 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5061 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5062
5063 return expand_atomic_exchange (target, mem, val, model);
5064 }
5065
5066 /* Expand the __atomic_compare_exchange intrinsic:
5067 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5068 TYPE desired, BOOL weak,
5069 enum memmodel success,
5070 enum memmodel failure)
5071 EXP is the CALL_EXPR.
5072 TARGET is an optional place for us to store the results. */
5073
5074 static rtx
5075 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5076 rtx target)
5077 {
5078 rtx expect, desired, mem, oldval;
5079 rtx_code_label *label;
5080 enum memmodel success, failure;
5081 tree weak;
5082 bool is_weak;
5083 source_location loc
5084 = expansion_point_location_if_in_system_header (input_location);
5085
5086 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5087 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5088
5089 if (failure > success)
5090 {
5091 warning_at (loc, OPT_Winvalid_memory_model,
5092 "failure memory model cannot be stronger than success "
5093 "memory model for %<__atomic_compare_exchange%>");
5094 success = MEMMODEL_SEQ_CST;
5095 }
5096
5097 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5098 {
5099 warning_at (loc, OPT_Winvalid_memory_model,
5100 "invalid failure memory model for "
5101 "%<__atomic_compare_exchange%>");
5102 failure = MEMMODEL_SEQ_CST;
5103 success = MEMMODEL_SEQ_CST;
5104 }
5105
5106
5107 if (!flag_inline_atomics)
5108 return NULL_RTX;
5109
5110 /* Expand the operands. */
5111 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5112
5113 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5114 expect = convert_memory_address (Pmode, expect);
5115 expect = gen_rtx_MEM (mode, expect);
5116 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5117
5118 weak = CALL_EXPR_ARG (exp, 3);
5119 is_weak = false;
5120 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5121 is_weak = true;
5122
5123 if (target == const0_rtx)
5124 target = NULL;
5125
5126 /* Lest the rtl backend create a race condition with an improper store
5127 to memory, always create a new pseudo for OLDVAL. */
5128 oldval = NULL;
5129
5130 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5131 is_weak, success, failure))
5132 return NULL_RTX;
5133
5134 /* Conditionally store back to EXPECT, lest we create a race condition
5135 with an improper store to memory. */
5136 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5137 the normal case where EXPECT is totally private, i.e. a register. At
5138 which point the store can be unconditional. */
5139 label = gen_label_rtx ();
5140 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
5141 GET_MODE (target), 1, label);
5142 emit_move_insn (expect, oldval);
5143 emit_label (label);
5144
5145 return target;
5146 }
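/* For illustration, a hypothetical user-level call such as

     int expected = 0;
     bool ok = __atomic_compare_exchange_n (&word, &expected, 1, false,
					    __ATOMIC_SEQ_CST, __ATOMIC_RELAXED);

   is expanded here.  Note that the conditional store above writes OLDVAL back
   into EXPECTED only when the compare-and-swap reports failure.  */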
5147
5148 /* Helper function for expand_ifn_atomic_compare_exchange - expand
5149 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
5150 call. The weak parameter must be dropped to match the expected parameter
5151 list and the expected argument changed from value to pointer to memory
5152 slot. */
5153
5154 static void
5155 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
5156 {
5157 unsigned int z;
5158 vec<tree, va_gc> *vec;
5159
5160 vec_alloc (vec, 5);
5161 vec->quick_push (gimple_call_arg (call, 0));
5162 tree expected = gimple_call_arg (call, 1);
5163 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
5164 TREE_TYPE (expected));
5165 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
5166 if (expd != x)
5167 emit_move_insn (x, expd);
5168 tree v = make_tree (TREE_TYPE (expected), x);
5169 vec->quick_push (build1 (ADDR_EXPR,
5170 build_pointer_type (TREE_TYPE (expected)), v));
5171 vec->quick_push (gimple_call_arg (call, 2));
5172 /* Skip the boolean weak parameter. */
5173 for (z = 4; z < 6; z++)
5174 vec->quick_push (gimple_call_arg (call, z));
5175 built_in_function fncode
5176 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
5177 + exact_log2 (GET_MODE_SIZE (mode)));
5178 tree fndecl = builtin_decl_explicit (fncode);
5179 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
5180 fndecl);
5181 tree exp = build_call_vec (boolean_type_node, fn, vec);
5182 tree lhs = gimple_call_lhs (call);
5183 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
5184 if (lhs)
5185 {
5186 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
5187 if (GET_MODE (boolret) != mode)
5188 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
5189 x = force_reg (mode, x);
5190 write_complex_part (target, boolret, true);
5191 write_complex_part (target, x, false);
5192 }
5193 }
5194
5195 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
5196
5197 void
5198 expand_ifn_atomic_compare_exchange (gcall *call)
5199 {
5200 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
5201 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
5202 machine_mode mode = mode_for_size (BITS_PER_UNIT * size, MODE_INT, 0);
5203 rtx expect, desired, mem, oldval, boolret;
5204 enum memmodel success, failure;
5205 tree lhs;
5206 bool is_weak;
5207 source_location loc
5208 = expansion_point_location_if_in_system_header (gimple_location (call));
5209
5210 success = get_memmodel (gimple_call_arg (call, 4));
5211 failure = get_memmodel (gimple_call_arg (call, 5));
5212
5213 if (failure > success)
5214 {
5215 warning_at (loc, OPT_Winvalid_memory_model,
5216 "failure memory model cannot be stronger than success "
5217 "memory model for %<__atomic_compare_exchange%>");
5218 success = MEMMODEL_SEQ_CST;
5219 }
5220
5221 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5222 {
5223 warning_at (loc, OPT_Winvalid_memory_model,
5224 "invalid failure memory model for "
5225 "%<__atomic_compare_exchange%>");
5226 failure = MEMMODEL_SEQ_CST;
5227 success = MEMMODEL_SEQ_CST;
5228 }
5229
5230 if (!flag_inline_atomics)
5231 {
5232 expand_ifn_atomic_compare_exchange_into_call (call, mode);
5233 return;
5234 }
5235
5236 /* Expand the operands. */
5237 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
5238
5239 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
5240 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
5241
5242 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
5243
5244 boolret = NULL;
5245 oldval = NULL;
5246
5247 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
5248 is_weak, success, failure))
5249 {
5250 expand_ifn_atomic_compare_exchange_into_call (call, mode);
5251 return;
5252 }
5253
5254 lhs = gimple_call_lhs (call);
5255 if (lhs)
5256 {
5257 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
5258 if (GET_MODE (boolret) != mode)
5259 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
5260 write_complex_part (target, boolret, true);
5261 write_complex_part (target, oldval, false);
5262 }
5263 }
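/* As the masks above show, argument 3 of the internal call packs two values:
   the access size in its low eight bits and the "weak" flag in bit 8.  As an
   illustrative encoding (not source-level syntax), a weak 4-byte
   compare-exchange passes 4 | 256 == 260 there.  */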
5264
5265 /* Expand the __atomic_load intrinsic:
5266 TYPE __atomic_load (TYPE *object, enum memmodel)
5267 EXP is the CALL_EXPR.
5268 TARGET is an optional place for us to store the results. */
5269
5270 static rtx
5271 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
5272 {
5273 rtx mem;
5274 enum memmodel model;
5275
5276 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5277 if (is_mm_release (model) || is_mm_acq_rel (model))
5278 {
5279 source_location loc
5280 = expansion_point_location_if_in_system_header (input_location);
5281 warning_at (loc, OPT_Winvalid_memory_model,
5282 "invalid memory model for %<__atomic_load%>");
5283 model = MEMMODEL_SEQ_CST;
5284 }
5285
5286 if (!flag_inline_atomics)
5287 return NULL_RTX;
5288
5289 /* Expand the operand. */
5290 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5291
5292 return expand_atomic_load (target, mem, model);
5293 }
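/* For illustration: __ATOMIC_RELAXED, __ATOMIC_CONSUME, __ATOMIC_ACQUIRE and
   __ATOMIC_SEQ_CST are the orders accepted for a load, so a hypothetical
   __atomic_load_n (&word, __ATOMIC_RELEASE) trips the warning above and is
   expanded as if __ATOMIC_SEQ_CST had been written.  */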
5294
5295
5296 /* Expand the __atomic_store intrinsic:
5297 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5298 EXP is the CALL_EXPR.
5299 TARGET is an optional place for us to store the results. */
5300
5301 static rtx
5302 expand_builtin_atomic_store (machine_mode mode, tree exp)
5303 {
5304 rtx mem, val;
5305 enum memmodel model;
5306
5307 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5308 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
5309 || is_mm_release (model)))
5310 {
5311 source_location loc
5312 = expansion_point_location_if_in_system_header (input_location);
5313 warning_at (loc, OPT_Winvalid_memory_model,
5314 "invalid memory model for %<__atomic_store%>");
5315 model = MEMMODEL_SEQ_CST;
5316 }
5317
5318 if (!flag_inline_atomics)
5319 return NULL_RTX;
5320
5321 /* Expand the operands. */
5322 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5323 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5324
5325 return expand_atomic_store (mem, val, model, false);
5326 }
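/* For illustration: only __ATOMIC_RELAXED, __ATOMIC_RELEASE and
   __ATOMIC_SEQ_CST are accepted here, so a hypothetical
   __atomic_store_n (&word, 0, __ATOMIC_ACQUIRE) draws -Winvalid-memory-model
   and is expanded with __ATOMIC_SEQ_CST instead.  */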
5327
5328 /* Expand the __atomic_fetch_XXX intrinsic:
5329 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5330 EXP is the CALL_EXPR.
5331 TARGET is an optional place for us to store the results.
5332 CODE is the operation: PLUS, MINUS, AND, XOR, IOR, or NOT.
5333 FETCH_AFTER is true if returning the result of the operation.
5334 FETCH_AFTER is false if returning the value before the operation.
5335 IGNORE is true if the result is not used.
5336 EXT_CALL is the correct builtin for an external call if this cannot be
5337 resolved to an instruction sequence. */
5338
5339 static rtx
5340 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
5341 enum rtx_code code, bool fetch_after,
5342 bool ignore, enum built_in_function ext_call)
5343 {
5344 rtx val, mem, ret;
5345 enum memmodel model;
5346 tree fndecl;
5347 tree addr;
5348
5349 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5350
5351 /* Expand the operands. */
5352 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5353 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5354
5355 /* Only try generating instructions if inlining is turned on. */
5356 if (flag_inline_atomics)
5357 {
5358 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5359 if (ret)
5360 return ret;
5361 }
5362
5363 /* Return if a different routine isn't needed for the library call. */
5364 if (ext_call == BUILT_IN_NONE)
5365 return NULL_RTX;
5366
5367 /* Change the call to the specified function. */
5368 fndecl = get_callee_fndecl (exp);
5369 addr = CALL_EXPR_FN (exp);
5370 STRIP_NOPS (addr);
5371
5372 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5373 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
5374
5375 /* Expand the call here so we can emit trailing code. */
5376 ret = expand_call (exp, target, ignore);
5377
5378 /* Replace the original function just in case it matters. */
5379 TREE_OPERAND (addr, 0) = fndecl;
5380
5381 /* Then issue the arithmetic correction to return the right result. */
5382 if (!ignore)
5383 {
5384 if (code == NOT)
5385 {
5386 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5387 OPTAB_LIB_WIDEN);
5388 ret = expand_simple_unop (mode, NOT, ret, target, true);
5389 }
5390 else
5391 ret = expand_simple_binop (mode, code, ret, val, target, true,
5392 OPTAB_LIB_WIDEN);
5393 }
5394 return ret;
5395 }
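/* A sketch of the trailing correction above, for a hypothetical
   __atomic_add_fetch (&word, n, model) that could not be inlined: the call
   is redirected to the EXT_CALL routine __atomic_fetch_add, which returns
   the old value, and the desired new value is then recovered as ret + n.
   For the NAND flavour the correction is ret = ~(ret & n), matching the
   AND/NOT pair above.  */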
5396
5397 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
5398
5399 void
5400 expand_ifn_atomic_bit_test_and (gcall *call)
5401 {
5402 tree ptr = gimple_call_arg (call, 0);
5403 tree bit = gimple_call_arg (call, 1);
5404 tree flag = gimple_call_arg (call, 2);
5405 tree lhs = gimple_call_lhs (call);
5406 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
5407 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
5408 enum rtx_code code;
5409 optab optab;
5410 struct expand_operand ops[5];
5411
5412 gcc_assert (flag_inline_atomics);
5413
5414 if (gimple_call_num_args (call) == 4)
5415 model = get_memmodel (gimple_call_arg (call, 3));
5416
5417 rtx mem = get_builtin_sync_mem (ptr, mode);
5418 rtx val = expand_expr_force_mode (bit, mode);
5419
5420 switch (gimple_call_internal_fn (call))
5421 {
5422 case IFN_ATOMIC_BIT_TEST_AND_SET:
5423 code = IOR;
5424 optab = atomic_bit_test_and_set_optab;
5425 break;
5426 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
5427 code = XOR;
5428 optab = atomic_bit_test_and_complement_optab;
5429 break;
5430 case IFN_ATOMIC_BIT_TEST_AND_RESET:
5431 code = AND;
5432 optab = atomic_bit_test_and_reset_optab;
5433 break;
5434 default:
5435 gcc_unreachable ();
5436 }
5437
5438 if (lhs == NULL_TREE)
5439 {
5440 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
5441 val, NULL_RTX, true, OPTAB_DIRECT);
5442 if (code == AND)
5443 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
5444 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
5445 return;
5446 }
5447
5448 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
5449 enum insn_code icode = direct_optab_handler (optab, mode);
5450 gcc_assert (icode != CODE_FOR_nothing);
5451 create_output_operand (&ops[0], target, mode);
5452 create_fixed_operand (&ops[1], mem);
5453 create_convert_operand_to (&ops[2], val, mode, true);
5454 create_integer_operand (&ops[3], model);
5455 create_integer_operand (&ops[4], integer_onep (flag));
5456 if (maybe_expand_insn (icode, 5, ops))
5457 return;
5458
5459 rtx bitval = val;
5460 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
5461 val, NULL_RTX, true, OPTAB_DIRECT);
5462 rtx maskval = val;
5463 if (code == AND)
5464 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
5465 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
5466 code, model, false);
5467 if (integer_onep (flag))
5468 {
5469 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
5470 NULL_RTX, true, OPTAB_DIRECT);
5471 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
5472 true, OPTAB_DIRECT);
5473 }
5474 else
5475 result = expand_simple_binop (mode, AND, result, maskval, target, true,
5476 OPTAB_DIRECT);
5477 if (result != target)
5478 emit_move_insn (target, result);
5479 }
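/* For illustration, the GIMPLE-level matching done earlier in the pipeline
   turns a hypothetical

     (__atomic_fetch_or (&word, 1 << bit, __ATOMIC_SEQ_CST) >> bit) & 1

   into IFN_ATOMIC_BIT_TEST_AND_SET.  When the target provides
   atomic_bit_test_and_set_optab the expansion above uses it directly;
   otherwise it falls back to the fetch-or plus shift-and-mask sequence.  */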
5480
5481 /* Expand an atomic clear operation.
5482 void _atomic_clear (BOOL *obj, enum memmodel)
5483 EXP is the call expression. */
5484
5485 static rtx
5486 expand_builtin_atomic_clear (tree exp)
5487 {
5488 machine_mode mode;
5489 rtx mem, ret;
5490 enum memmodel model;
5491
5492 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5493 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5494 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5495
5496 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
5497 {
5498 source_location loc
5499 = expansion_point_location_if_in_system_header (input_location);
5500 warning_at (loc, OPT_Winvalid_memory_model,
5501 "invalid memory model for %<__atomic_store%>");
5502 model = MEMMODEL_SEQ_CST;
5503 }
5504
5505 /* Try issuing an __atomic_store through expand_atomic_store, allowing
5506 fallback to a __sync_lock_release pattern. The only way this can fail is
5507 if the bool type is larger than a word size. Unlikely, but handle it
5508 anyway for completeness by emitting a plain store below, assuming a single
5509 threaded model since there is no atomic support in this case, and no barriers are required. */
5510 ret = expand_atomic_store (mem, const0_rtx, model, true);
5511 if (!ret)
5512 emit_move_insn (mem, const0_rtx);
5513 return const0_rtx;
5514 }
5515
5516 /* Expand an atomic test_and_set operation.
5517 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
5518 EXP is the call expression. */
5519
5520 static rtx
5521 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5522 {
5523 rtx mem;
5524 enum memmodel model;
5525 machine_mode mode;
5526
5527 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5528 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5529 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5530
5531 return expand_atomic_test_and_set (target, mem, model);
5532 }
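/* For illustration, these two entry points back a minimal, hypothetical spin
   lock:

     static bool busy;
     while (__atomic_test_and_set (&busy, __ATOMIC_ACQUIRE))
       ;
     ... critical section ...
     __atomic_clear (&busy, __ATOMIC_RELEASE);

   Both expanders operate on a bool-sized memory slot, hence the
   BOOL_TYPE_SIZE mode lookup above.  */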
5533
5534
5535 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5536 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5537
5538 static tree
5539 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5540 {
5541 int size;
5542 machine_mode mode;
5543 unsigned int mode_align, type_align;
5544
5545 if (TREE_CODE (arg0) != INTEGER_CST)
5546 return NULL_TREE;
5547
5548 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5549 mode = mode_for_size (size, MODE_INT, 0);
5550 mode_align = GET_MODE_ALIGNMENT (mode);
5551
5552 if (TREE_CODE (arg1) == INTEGER_CST)
5553 {
5554 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
5555
5556 /* Either this argument is null, or it's a fake pointer encoding
5557 the alignment of the object. */
5558 val = least_bit_hwi (val);
5559 val *= BITS_PER_UNIT;
5560
5561 if (val == 0 || mode_align < val)
5562 type_align = mode_align;
5563 else
5564 type_align = val;
5565 }
5566 else
5567 {
5568 tree ttype = TREE_TYPE (arg1);
5569
5570 /* This function is usually invoked and folded immediately by the front
5571 end before anything else has a chance to look at it. The pointer
5572 parameter at this point is usually cast to a void *, so check for that
5573 and look past the cast. */
5574 if (CONVERT_EXPR_P (arg1)
5575 && POINTER_TYPE_P (ttype)
5576 && VOID_TYPE_P (TREE_TYPE (ttype))
5577 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
5578 arg1 = TREE_OPERAND (arg1, 0);
5579
5580 ttype = TREE_TYPE (arg1);
5581 gcc_assert (POINTER_TYPE_P (ttype));
5582
5583 /* Get the underlying type of the object. */
5584 ttype = TREE_TYPE (ttype);
5585 type_align = TYPE_ALIGN (ttype);
5586 }
5587
5588 /* If the object has smaller alignment, the lock free routines cannot
5589 be used. */
5590 if (type_align < mode_align)
5591 return boolean_false_node;
5592
5593 /* Check if a compare_and_swap pattern exists for the mode which represents
5594 the required size. The pattern is not allowed to fail, so the existence
5595 of the pattern indicates support is present. */
5596 if (can_compare_and_swap_p (mode, true))
5597 return boolean_true_node;
5598 else
5599 return boolean_false_node;
5600 }
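/* For illustration, the folder above lets hypothetical calls such as

     __atomic_always_lock_free (sizeof (int), 0)
     __atomic_always_lock_free (4, (void *) 2)

   become compile-time constants.  The first uses the mode's natural
   alignment; the second's "fake pointer" contributes only its least
   significant set bit (an alignment of 2 bytes here), which can demote the
   answer to false on targets whose compare-and-swap needs wider alignment.  */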
5601
5602 /* Return true if the parameters to call EXP represent an object which will
5603 always generate lock free instructions. The first argument represents the
5604 size of the object, and the second parameter is a pointer to the object
5605 itself. If NULL is passed for the object, then the result is based on
5606 typical alignment for an object of the specified size. Otherwise return
5607 false. */
5608
5609 static rtx
5610 expand_builtin_atomic_always_lock_free (tree exp)
5611 {
5612 tree size;
5613 tree arg0 = CALL_EXPR_ARG (exp, 0);
5614 tree arg1 = CALL_EXPR_ARG (exp, 1);
5615
5616 if (TREE_CODE (arg0) != INTEGER_CST)
5617 {
5618 error ("non-constant argument 1 to __atomic_always_lock_free");
5619 return const0_rtx;
5620 }
5621
5622 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5623 if (size == boolean_true_node)
5624 return const1_rtx;
5625 return const0_rtx;
5626 }
5627
5628 /* Return boolean_true_node if it can be determined that the object ARG1 of
5629 size ARG0 is always lock free on this architecture, otherwise NULL_TREE. */
5630
5631 static tree
5632 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5633 {
5634 if (!flag_inline_atomics)
5635 return NULL_TREE;
5636
5637 /* If it isn't always lock free, don't generate a result. */
5638 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5639 return boolean_true_node;
5640
5641 return NULL_TREE;
5642 }
5643
5644 /* Return const1_rtx if the parameters to call EXP represent an object which
5645 will always generate lock-free instructions. The first argument represents
5646 the size of the object, and the second parameter is a pointer to the object
5647 itself. If NULL is passed for the object, then the result is based on
5648 typical alignment for an object of the specified size. Otherwise return
5649 NULL_RTX. */
5650
5651 static rtx
5652 expand_builtin_atomic_is_lock_free (tree exp)
5653 {
5654 tree size;
5655 tree arg0 = CALL_EXPR_ARG (exp, 0);
5656 tree arg1 = CALL_EXPR_ARG (exp, 1);
5657
5658 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5659 {
5660 error ("non-integer argument 1 to __atomic_is_lock_free");
5661 return NULL_RTX;
5662 }
5663
5664 if (!flag_inline_atomics)
5665 return NULL_RTX;
5666
5667 /* If the value is known at compile time, return the RTX for it. */
5668 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5669 if (size == boolean_true_node)
5670 return const1_rtx;
5671
5672 return NULL_RTX;
5673 }
5674
5675 /* Expand the __atomic_thread_fence intrinsic:
5676 void __atomic_thread_fence (enum memmodel)
5677 EXP is the CALL_EXPR. */
5678
5679 static void
5680 expand_builtin_atomic_thread_fence (tree exp)
5681 {
5682 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5683 expand_mem_thread_fence (model);
5684 }
5685
5686 /* Expand the __atomic_signal_fence intrinsic:
5687 void __atomic_signal_fence (enum memmodel)
5688 EXP is the CALL_EXPR. */
5689
5690 static void
5691 expand_builtin_atomic_signal_fence (tree exp)
5692 {
5693 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5694 expand_mem_signal_fence (model);
5695 }
5696
5697 /* Expand the __sync_synchronize intrinsic. */
5698
5699 static void
5700 expand_builtin_sync_synchronize (void)
5701 {
5702 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
5703 }
5704
5705 static rtx
5706 expand_builtin_thread_pointer (tree exp, rtx target)
5707 {
5708 enum insn_code icode;
5709 if (!validate_arglist (exp, VOID_TYPE))
5710 return const0_rtx;
5711 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5712 if (icode != CODE_FOR_nothing)
5713 {
5714 struct expand_operand op;
5715 /* If the target is not suitable then create a new target. */
5716 if (target == NULL_RTX
5717 || !REG_P (target)
5718 || GET_MODE (target) != Pmode)
5719 target = gen_reg_rtx (Pmode);
5720 create_output_operand (&op, target, Pmode);
5721 expand_insn (icode, 1, &op);
5722 return target;
5723 }
5724 error ("__builtin_thread_pointer is not supported on this target");
5725 return const0_rtx;
5726 }
5727
5728 static void
5729 expand_builtin_set_thread_pointer (tree exp)
5730 {
5731 enum insn_code icode;
5732 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5733 return;
5734 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5735 if (icode != CODE_FOR_nothing)
5736 {
5737 struct expand_operand op;
5738 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5739 Pmode, EXPAND_NORMAL);
5740 create_input_operand (&op, val, Pmode);
5741 expand_insn (icode, 1, &op);
5742 return;
5743 }
5744 error ("__builtin_set_thread_pointer is not supported on this target");
5745 }
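/* For illustration, on targets that implement get_thread_pointer_optab
   (typically as a read of a dedicated thread register), a hypothetical

     void *tp = __builtin_thread_pointer ();

   expands through the code above into a single register copy; targets
   without the optab report the hard error instead of falling back to a
   library call.  */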
5746
5747 \f
5748 /* Emit code to restore the current value of stack. */
5749
5750 static void
5751 expand_stack_restore (tree var)
5752 {
5753 rtx_insn *prev;
5754 rtx sa = expand_normal (var);
5755
5756 sa = convert_memory_address (Pmode, sa);
5757
5758 prev = get_last_insn ();
5759 emit_stack_restore (SAVE_BLOCK, sa);
5760
5761 record_new_stack_level ();
5762
5763 fixup_args_size_notes (prev, get_last_insn (), 0);
5764 }
5765
5766 /* Emit code to save the current value of stack. */
5767
5768 static rtx
5769 expand_stack_save (void)
5770 {
5771 rtx ret = NULL_RTX;
5772
5773 emit_stack_save (SAVE_BLOCK, &ret);
5774 return ret;
5775 }
5776
5777
5778 /* Expand an expression EXP that calls a built-in function,
5779 with result going to TARGET if that's convenient
5780 (and in mode MODE if that's convenient).
5781 SUBTARGET may be used as the target for computing one of EXP's operands.
5782 IGNORE is nonzero if the value is to be ignored. */
5783
5784 rtx
5785 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
5786 int ignore)
5787 {
5788 tree fndecl = get_callee_fndecl (exp);
5789 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5790 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5791 int flags;
5792
5793 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5794 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5795
5796 /* When ASan is enabled, we don't want to expand some memory/string
5797 builtins and rely on libsanitizer's hooks. This allows us to avoid
5798 redundant checks and be sure that possible overflow will be detected
5799 by ASan. */
5800
5801 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
5802 return expand_call (exp, target, ignore);
5803
5804 /* When not optimizing, generate calls to library functions for a certain
5805 set of builtins. */
5806 if (!optimize
5807 && !called_as_built_in (fndecl)
5808 && fcode != BUILT_IN_FORK
5809 && fcode != BUILT_IN_EXECL
5810 && fcode != BUILT_IN_EXECV
5811 && fcode != BUILT_IN_EXECLP
5812 && fcode != BUILT_IN_EXECLE
5813 && fcode != BUILT_IN_EXECVP
5814 && fcode != BUILT_IN_EXECVE
5815 && fcode != BUILT_IN_ALLOCA
5816 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5817 && fcode != BUILT_IN_FREE
5818 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
5819 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
5820 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
5821 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
5822 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
5823 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
5824 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
5825 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
5826 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
5827 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
5828 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
5829 && fcode != BUILT_IN_CHKP_BNDRET)
5830 return expand_call (exp, target, ignore);
5831
5832 /* The built-in function expanders test for target == const0_rtx
5833 to determine whether the function's result will be ignored. */
5834 if (ignore)
5835 target = const0_rtx;
5836
5837 /* If the result of a pure or const built-in function is ignored, and
5838 none of its arguments are volatile, we can avoid expanding the
5839 built-in call and just evaluate the arguments for side-effects. */
5840 if (target == const0_rtx
5841 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5842 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5843 {
5844 bool volatilep = false;
5845 tree arg;
5846 call_expr_arg_iterator iter;
5847
5848 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5849 if (TREE_THIS_VOLATILE (arg))
5850 {
5851 volatilep = true;
5852 break;
5853 }
5854
5855 if (! volatilep)
5856 {
5857 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5858 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5859 return const0_rtx;
5860 }
5861 }
5862
5863 /* expand_builtin_with_bounds is supposed to be used for
5864 instrumented builtin calls. */
5865 gcc_assert (!CALL_WITH_BOUNDS_P (exp));
5866
5867 switch (fcode)
5868 {
5869 CASE_FLT_FN (BUILT_IN_FABS):
5870 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
5871 case BUILT_IN_FABSD32:
5872 case BUILT_IN_FABSD64:
5873 case BUILT_IN_FABSD128:
5874 target = expand_builtin_fabs (exp, target, subtarget);
5875 if (target)
5876 return target;
5877 break;
5878
5879 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5880 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
5881 target = expand_builtin_copysign (exp, target, subtarget);
5882 if (target)
5883 return target;
5884 break;
5885
5886 /* Just do a normal library call if we were unable to fold
5887 the values. */
5888 CASE_FLT_FN (BUILT_IN_CABS):
5889 break;
5890
5891 CASE_FLT_FN (BUILT_IN_FMA):
5892 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
5893 if (target)
5894 return target;
5895 break;
5896
5897 CASE_FLT_FN (BUILT_IN_ILOGB):
5898 if (! flag_unsafe_math_optimizations)
5899 break;
5900 gcc_fallthrough ();
5901 CASE_FLT_FN (BUILT_IN_ISINF):
5902 CASE_FLT_FN (BUILT_IN_FINITE):
5903 case BUILT_IN_ISFINITE:
5904 case BUILT_IN_ISNORMAL:
5905 target = expand_builtin_interclass_mathfn (exp, target);
5906 if (target)
5907 return target;
5908 break;
5909
5910 CASE_FLT_FN (BUILT_IN_ICEIL):
5911 CASE_FLT_FN (BUILT_IN_LCEIL):
5912 CASE_FLT_FN (BUILT_IN_LLCEIL):
5913 CASE_FLT_FN (BUILT_IN_LFLOOR):
5914 CASE_FLT_FN (BUILT_IN_IFLOOR):
5915 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5916 target = expand_builtin_int_roundingfn (exp, target);
5917 if (target)
5918 return target;
5919 break;
5920
5921 CASE_FLT_FN (BUILT_IN_IRINT):
5922 CASE_FLT_FN (BUILT_IN_LRINT):
5923 CASE_FLT_FN (BUILT_IN_LLRINT):
5924 CASE_FLT_FN (BUILT_IN_IROUND):
5925 CASE_FLT_FN (BUILT_IN_LROUND):
5926 CASE_FLT_FN (BUILT_IN_LLROUND):
5927 target = expand_builtin_int_roundingfn_2 (exp, target);
5928 if (target)
5929 return target;
5930 break;
5931
5932 CASE_FLT_FN (BUILT_IN_POWI):
5933 target = expand_builtin_powi (exp, target);
5934 if (target)
5935 return target;
5936 break;
5937
5938 CASE_FLT_FN (BUILT_IN_CEXPI):
5939 target = expand_builtin_cexpi (exp, target);
5940 gcc_assert (target);
5941 return target;
5942
5943 CASE_FLT_FN (BUILT_IN_SIN):
5944 CASE_FLT_FN (BUILT_IN_COS):
5945 if (! flag_unsafe_math_optimizations)
5946 break;
5947 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5948 if (target)
5949 return target;
5950 break;
5951
5952 CASE_FLT_FN (BUILT_IN_SINCOS):
5953 if (! flag_unsafe_math_optimizations)
5954 break;
5955 target = expand_builtin_sincos (exp);
5956 if (target)
5957 return target;
5958 break;
5959
5960 case BUILT_IN_APPLY_ARGS:
5961 return expand_builtin_apply_args ();
5962
5963 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5964 FUNCTION with a copy of the parameters described by
5965 ARGUMENTS, and ARGSIZE. It returns a block of memory
5966 allocated on the stack into which is stored all the registers
5967 that might possibly be used for returning the result of a
5968 function. ARGUMENTS is the value returned by
5969 __builtin_apply_args. ARGSIZE is the number of bytes of
5970 arguments that must be copied. ??? How should this value be
5971 computed? We'll also need a safe worst case value for varargs
5972 functions. */
5973 case BUILT_IN_APPLY:
5974 if (!validate_arglist (exp, POINTER_TYPE,
5975 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5976 && !validate_arglist (exp, REFERENCE_TYPE,
5977 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5978 return const0_rtx;
5979 else
5980 {
5981 rtx ops[3];
5982
5983 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
5984 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
5985 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
5986
5987 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5988 }
5989
5990 /* __builtin_return (RESULT) causes the function to return the
5991 value described by RESULT. RESULT is address of the block of
5992 memory returned by __builtin_apply. */
5993 case BUILT_IN_RETURN:
5994 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5995 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
5996 return const0_rtx;
5997
5998 case BUILT_IN_SAVEREGS:
5999 return expand_builtin_saveregs ();
6000
6001 case BUILT_IN_VA_ARG_PACK:
6002 /* All valid uses of __builtin_va_arg_pack () are removed during
6003 inlining. */
6004 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6005 return const0_rtx;
6006
6007 case BUILT_IN_VA_ARG_PACK_LEN:
6008 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6009 inlining. */
6010 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6011 return const0_rtx;
6012
6013 /* Return the address of the first anonymous stack arg. */
6014 case BUILT_IN_NEXT_ARG:
6015 if (fold_builtin_next_arg (exp, false))
6016 return const0_rtx;
6017 return expand_builtin_next_arg ();
6018
6019 case BUILT_IN_CLEAR_CACHE:
6020 target = expand_builtin___clear_cache (exp);
6021 if (target)
6022 return target;
6023 break;
6024
6025 case BUILT_IN_CLASSIFY_TYPE:
6026 return expand_builtin_classify_type (exp);
6027
6028 case BUILT_IN_CONSTANT_P:
6029 return const0_rtx;
6030
6031 case BUILT_IN_FRAME_ADDRESS:
6032 case BUILT_IN_RETURN_ADDRESS:
6033 return expand_builtin_frame_address (fndecl, exp);
6034
6035 /* Returns the address of the area where the structure is returned,
6036 or 0 otherwise. */
6037 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6038 if (call_expr_nargs (exp) != 0
6039 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6040 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6041 return const0_rtx;
6042 else
6043 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6044
6045 case BUILT_IN_ALLOCA:
6046 case BUILT_IN_ALLOCA_WITH_ALIGN:
6047 /* If the allocation stems from the declaration of a variable-sized
6048 object, it cannot accumulate. */
6049 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6050 if (target)
6051 return target;
6052 break;
6053
6054 case BUILT_IN_STACK_SAVE:
6055 return expand_stack_save ();
6056
6057 case BUILT_IN_STACK_RESTORE:
6058 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6059 return const0_rtx;
6060
6061 case BUILT_IN_BSWAP16:
6062 case BUILT_IN_BSWAP32:
6063 case BUILT_IN_BSWAP64:
6064 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6065 if (target)
6066 return target;
6067 break;
6068
6069 CASE_INT_FN (BUILT_IN_FFS):
6070 target = expand_builtin_unop (target_mode, exp, target,
6071 subtarget, ffs_optab);
6072 if (target)
6073 return target;
6074 break;
6075
6076 CASE_INT_FN (BUILT_IN_CLZ):
6077 target = expand_builtin_unop (target_mode, exp, target,
6078 subtarget, clz_optab);
6079 if (target)
6080 return target;
6081 break;
6082
6083 CASE_INT_FN (BUILT_IN_CTZ):
6084 target = expand_builtin_unop (target_mode, exp, target,
6085 subtarget, ctz_optab);
6086 if (target)
6087 return target;
6088 break;
6089
6090 CASE_INT_FN (BUILT_IN_CLRSB):
6091 target = expand_builtin_unop (target_mode, exp, target,
6092 subtarget, clrsb_optab);
6093 if (target)
6094 return target;
6095 break;
6096
6097 CASE_INT_FN (BUILT_IN_POPCOUNT):
6098 target = expand_builtin_unop (target_mode, exp, target,
6099 subtarget, popcount_optab);
6100 if (target)
6101 return target;
6102 break;
6103
6104 CASE_INT_FN (BUILT_IN_PARITY):
6105 target = expand_builtin_unop (target_mode, exp, target,
6106 subtarget, parity_optab);
6107 if (target)
6108 return target;
6109 break;
6110
6111 case BUILT_IN_STRLEN:
6112 target = expand_builtin_strlen (exp, target, target_mode);
6113 if (target)
6114 return target;
6115 break;
6116
6117 case BUILT_IN_STRCPY:
6118 target = expand_builtin_strcpy (exp, target);
6119 if (target)
6120 return target;
6121 break;
6122
6123 case BUILT_IN_STRNCPY:
6124 target = expand_builtin_strncpy (exp, target);
6125 if (target)
6126 return target;
6127 break;
6128
6129 case BUILT_IN_STPCPY:
6130 target = expand_builtin_stpcpy (exp, target, mode);
6131 if (target)
6132 return target;
6133 break;
6134
6135 case BUILT_IN_MEMCPY:
6136 target = expand_builtin_memcpy (exp, target);
6137 if (target)
6138 return target;
6139 break;
6140
6141 case BUILT_IN_MEMPCPY:
6142 target = expand_builtin_mempcpy (exp, target, mode);
6143 if (target)
6144 return target;
6145 break;
6146
6147 case BUILT_IN_MEMSET:
6148 target = expand_builtin_memset (exp, target, mode);
6149 if (target)
6150 return target;
6151 break;
6152
6153 case BUILT_IN_BZERO:
6154 target = expand_builtin_bzero (exp);
6155 if (target)
6156 return target;
6157 break;
6158
6159 case BUILT_IN_STRCMP:
6160 target = expand_builtin_strcmp (exp, target);
6161 if (target)
6162 return target;
6163 break;
6164
6165 case BUILT_IN_STRNCMP:
6166 target = expand_builtin_strncmp (exp, target, mode);
6167 if (target)
6168 return target;
6169 break;
6170
6171 case BUILT_IN_BCMP:
6172 case BUILT_IN_MEMCMP:
6173 case BUILT_IN_MEMCMP_EQ:
6174 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
6175 if (target)
6176 return target;
6177 if (fcode == BUILT_IN_MEMCMP_EQ)
6178 {
6179 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
6180 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
6181 }
6182 break;
6183
6184 case BUILT_IN_SETJMP:
6185 /* This should have been lowered to the builtins below. */
6186 gcc_unreachable ();
6187
6188 case BUILT_IN_SETJMP_SETUP:
6189 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6190 and the receiver label. */
6191 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6192 {
6193 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6194 VOIDmode, EXPAND_NORMAL);
6195 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6196 rtx_insn *label_r = label_rtx (label);
6197
6198 /* This is copied from the handling of non-local gotos. */
6199 expand_builtin_setjmp_setup (buf_addr, label_r);
6200 nonlocal_goto_handler_labels
6201 = gen_rtx_INSN_LIST (VOIDmode, label_r,
6202 nonlocal_goto_handler_labels);
6203 /* ??? Do not let expand_label treat us as such since we would
6204 not want to be both on the list of non-local labels and on
6205 the list of forced labels. */
6206 FORCED_LABEL (label) = 0;
6207 return const0_rtx;
6208 }
6209 break;
6210
6211 case BUILT_IN_SETJMP_RECEIVER:
6212 /* __builtin_setjmp_receiver is passed the receiver label. */
6213 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6214 {
6215 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6216 rtx_insn *label_r = label_rtx (label);
6217
6218 expand_builtin_setjmp_receiver (label_r);
6219 return const0_rtx;
6220 }
6221 break;
6222
6223 /* __builtin_longjmp is passed a pointer to an array of five words.
6224 It's similar to the C library longjmp function but works with
6225 __builtin_setjmp above. */
6226 case BUILT_IN_LONGJMP:
6227 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6228 {
6229 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6230 VOIDmode, EXPAND_NORMAL);
6231 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6232
6233 if (value != const1_rtx)
6234 {
6235 error ("%<__builtin_longjmp%> second argument must be 1");
6236 return const0_rtx;
6237 }
6238
6239 expand_builtin_longjmp (buf_addr, value);
6240 return const0_rtx;
6241 }
6242 break;
6243
6244 case BUILT_IN_NONLOCAL_GOTO:
6245 target = expand_builtin_nonlocal_goto (exp);
6246 if (target)
6247 return target;
6248 break;
6249
6250 /* This updates the setjmp buffer that is its argument with the value
6251 of the current stack pointer. */
6252 case BUILT_IN_UPDATE_SETJMP_BUF:
6253 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6254 {
6255 rtx buf_addr
6256 = expand_normal (CALL_EXPR_ARG (exp, 0));
6257
6258 expand_builtin_update_setjmp_buf (buf_addr);
6259 return const0_rtx;
6260 }
6261 break;
6262
6263 case BUILT_IN_TRAP:
6264 expand_builtin_trap ();
6265 return const0_rtx;
6266
6267 case BUILT_IN_UNREACHABLE:
6268 expand_builtin_unreachable ();
6269 return const0_rtx;
6270
6271 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6272 case BUILT_IN_SIGNBITD32:
6273 case BUILT_IN_SIGNBITD64:
6274 case BUILT_IN_SIGNBITD128:
6275 target = expand_builtin_signbit (exp, target);
6276 if (target)
6277 return target;
6278 break;
6279
6280 /* Various hooks for the DWARF 2 __throw routine. */
6281 case BUILT_IN_UNWIND_INIT:
6282 expand_builtin_unwind_init ();
6283 return const0_rtx;
6284 case BUILT_IN_DWARF_CFA:
6285 return virtual_cfa_rtx;
6286 #ifdef DWARF2_UNWIND_INFO
6287 case BUILT_IN_DWARF_SP_COLUMN:
6288 return expand_builtin_dwarf_sp_column ();
6289 case BUILT_IN_INIT_DWARF_REG_SIZES:
6290 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6291 return const0_rtx;
6292 #endif
6293 case BUILT_IN_FROB_RETURN_ADDR:
6294 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6295 case BUILT_IN_EXTRACT_RETURN_ADDR:
6296 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6297 case BUILT_IN_EH_RETURN:
6298 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6299 CALL_EXPR_ARG (exp, 1));
6300 return const0_rtx;
6301 case BUILT_IN_EH_RETURN_DATA_REGNO:
6302 return expand_builtin_eh_return_data_regno (exp);
6303 case BUILT_IN_EXTEND_POINTER:
6304 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6305 case BUILT_IN_EH_POINTER:
6306 return expand_builtin_eh_pointer (exp);
6307 case BUILT_IN_EH_FILTER:
6308 return expand_builtin_eh_filter (exp);
6309 case BUILT_IN_EH_COPY_VALUES:
6310 return expand_builtin_eh_copy_values (exp);
6311
6312 case BUILT_IN_VA_START:
6313 return expand_builtin_va_start (exp);
6314 case BUILT_IN_VA_END:
6315 return expand_builtin_va_end (exp);
6316 case BUILT_IN_VA_COPY:
6317 return expand_builtin_va_copy (exp);
6318 case BUILT_IN_EXPECT:
6319 return expand_builtin_expect (exp, target);
6320 case BUILT_IN_ASSUME_ALIGNED:
6321 return expand_builtin_assume_aligned (exp, target);
6322 case BUILT_IN_PREFETCH:
6323 expand_builtin_prefetch (exp);
6324 return const0_rtx;
6325
6326 case BUILT_IN_INIT_TRAMPOLINE:
6327 return expand_builtin_init_trampoline (exp, true);
6328 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6329 return expand_builtin_init_trampoline (exp, false);
6330 case BUILT_IN_ADJUST_TRAMPOLINE:
6331 return expand_builtin_adjust_trampoline (exp);
6332
6333 case BUILT_IN_FORK:
6334 case BUILT_IN_EXECL:
6335 case BUILT_IN_EXECV:
6336 case BUILT_IN_EXECLP:
6337 case BUILT_IN_EXECLE:
6338 case BUILT_IN_EXECVP:
6339 case BUILT_IN_EXECVE:
6340 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6341 if (target)
6342 return target;
6343 break;
6344
6345 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6346 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6347 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6348 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6349 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6350 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6351 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6352 if (target)
6353 return target;
6354 break;
6355
6356 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6357 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6358 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6359 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6360 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6361 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6362 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6363 if (target)
6364 return target;
6365 break;
6366
6367 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6368 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6369 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6370 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6371 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6372 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6373 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6374 if (target)
6375 return target;
6376 break;
6377
6378 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6379 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6380 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6381 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6382 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6383 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6384 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6385 if (target)
6386 return target;
6387 break;
6388
6389 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6390 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6391 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6392 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6393 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6394 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6395 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6396 if (target)
6397 return target;
6398 break;
6399
6400 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6401 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6402 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6403 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6404 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6405 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6406 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6407 if (target)
6408 return target;
6409 break;
6410
6411 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6412 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6413 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6414 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6415 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6416 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6417 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6418 if (target)
6419 return target;
6420 break;
6421
6422 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6423 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6424 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6425 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6426 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6427 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6428 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6429 if (target)
6430 return target;
6431 break;
6432
6433 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6434 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6435 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6436 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6437 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6438 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6439 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6440 if (target)
6441 return target;
6442 break;
6443
6444 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6445 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6446 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6447 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6448 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6449 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6450 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6451 if (target)
6452 return target;
6453 break;
6454
6455 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6456 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6457 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6458 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6459 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6460 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6461 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6462 if (target)
6463 return target;
6464 break;
6465
6466 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6467 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6468 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6469 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6470 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6471 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6472 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6473 if (target)
6474 return target;
6475 break;
6476
6477 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6478 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6479 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6480 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6481 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6482 if (mode == VOIDmode)
6483 mode = TYPE_MODE (boolean_type_node);
6484 if (!target || !register_operand (target, mode))
6485 target = gen_reg_rtx (mode);
6486
6487 mode = get_builtin_sync_mode
6488 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6489 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6490 if (target)
6491 return target;
6492 break;
6493
6494 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6495 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6496 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6497 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6498 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6499 mode = get_builtin_sync_mode
6500 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6501 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6502 if (target)
6503 return target;
6504 break;
6505
6506 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6507 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6508 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6509 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6510 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6511 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6512 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6513 if (target)
6514 return target;
6515 break;
6516
6517 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6518 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6519 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6520 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6521 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6522 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6523 expand_builtin_sync_lock_release (mode, exp);
6524 return const0_rtx;
6525
6526 case BUILT_IN_SYNC_SYNCHRONIZE:
6527 expand_builtin_sync_synchronize ();
6528 return const0_rtx;
6529
6530 case BUILT_IN_ATOMIC_EXCHANGE_1:
6531 case BUILT_IN_ATOMIC_EXCHANGE_2:
6532 case BUILT_IN_ATOMIC_EXCHANGE_4:
6533 case BUILT_IN_ATOMIC_EXCHANGE_8:
6534 case BUILT_IN_ATOMIC_EXCHANGE_16:
6535 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6536 target = expand_builtin_atomic_exchange (mode, exp, target);
6537 if (target)
6538 return target;
6539 break;
6540
6541 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6542 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6543 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6544 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6545 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6546 {
6547 unsigned int nargs, z;
6548 vec<tree, va_gc> *vec;
6549
6550 mode =
6551 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6552 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6553 if (target)
6554 return target;
6555
6556 /* If this is turned into an external library call, the weak parameter
6557 must be dropped to match the expected parameter list. */
6558 nargs = call_expr_nargs (exp);
6559 vec_alloc (vec, nargs - 1);
6560 for (z = 0; z < 3; z++)
6561 vec->quick_push (CALL_EXPR_ARG (exp, z));
6562 /* Skip the boolean weak parameter. */
6563 for (z = 4; z < 6; z++)
6564 vec->quick_push (CALL_EXPR_ARG (exp, z));
6565 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6566 break;
6567 }
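	/* The rebuilt call matches the external library entry point, which (as
	   the comment above notes) takes no weak argument: sketched,
	   bool __atomic_compare_exchange_N (void *ptr, void *expected,
	   TYPE desired, int success, int failure).  */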
6568
6569 case BUILT_IN_ATOMIC_LOAD_1:
6570 case BUILT_IN_ATOMIC_LOAD_2:
6571 case BUILT_IN_ATOMIC_LOAD_4:
6572 case BUILT_IN_ATOMIC_LOAD_8:
6573 case BUILT_IN_ATOMIC_LOAD_16:
6574 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6575 target = expand_builtin_atomic_load (mode, exp, target);
6576 if (target)
6577 return target;
6578 break;
6579
6580 case BUILT_IN_ATOMIC_STORE_1:
6581 case BUILT_IN_ATOMIC_STORE_2:
6582 case BUILT_IN_ATOMIC_STORE_4:
6583 case BUILT_IN_ATOMIC_STORE_8:
6584 case BUILT_IN_ATOMIC_STORE_16:
6585 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6586 target = expand_builtin_atomic_store (mode, exp);
6587 if (target)
6588 return const0_rtx;
6589 break;
6590
6591 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6592 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6593 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6594 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6595 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6596 {
6597 enum built_in_function lib;
6598 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6599 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6600 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6601 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6602 ignore, lib);
6603 if (target)
6604 return target;
6605 break;
6606 }
6607 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6608 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6609 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6610 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6611 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6612 {
6613 enum built_in_function lib;
6614 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6615 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6616 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6617 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6618 ignore, lib);
6619 if (target)
6620 return target;
6621 break;
6622 }
6623 case BUILT_IN_ATOMIC_AND_FETCH_1:
6624 case BUILT_IN_ATOMIC_AND_FETCH_2:
6625 case BUILT_IN_ATOMIC_AND_FETCH_4:
6626 case BUILT_IN_ATOMIC_AND_FETCH_8:
6627 case BUILT_IN_ATOMIC_AND_FETCH_16:
6628 {
6629 enum built_in_function lib;
6630 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6631 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6632 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6633 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6634 ignore, lib);
6635 if (target)
6636 return target;
6637 break;
6638 }
6639 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6640 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6641 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6642 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6643 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6644 {
6645 enum built_in_function lib;
6646 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6647 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6648 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6649 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6650 ignore, lib);
6651 if (target)
6652 return target;
6653 break;
6654 }
6655 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6656 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6657 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6658 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6659 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6660 {
6661 enum built_in_function lib;
6662 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6663 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6664 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6665 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6666 ignore, lib);
6667 if (target)
6668 return target;
6669 break;
6670 }
6671 case BUILT_IN_ATOMIC_OR_FETCH_1:
6672 case BUILT_IN_ATOMIC_OR_FETCH_2:
6673 case BUILT_IN_ATOMIC_OR_FETCH_4:
6674 case BUILT_IN_ATOMIC_OR_FETCH_8:
6675 case BUILT_IN_ATOMIC_OR_FETCH_16:
6676 {
6677 enum built_in_function lib;
6678 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6679 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6680 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6681 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6682 ignore, lib);
6683 if (target)
6684 return target;
6685 break;
6686 }
6687 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6688 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6689 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6690 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6691 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6692 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6693 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6694 ignore, BUILT_IN_NONE);
6695 if (target)
6696 return target;
6697 break;
6698
6699 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6700 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6701 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6702 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6703 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6704 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6705 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6706 ignore, BUILT_IN_NONE);
6707 if (target)
6708 return target;
6709 break;
6710
6711 case BUILT_IN_ATOMIC_FETCH_AND_1:
6712 case BUILT_IN_ATOMIC_FETCH_AND_2:
6713 case BUILT_IN_ATOMIC_FETCH_AND_4:
6714 case BUILT_IN_ATOMIC_FETCH_AND_8:
6715 case BUILT_IN_ATOMIC_FETCH_AND_16:
6716 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6717 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6718 ignore, BUILT_IN_NONE);
6719 if (target)
6720 return target;
6721 break;
6722
6723 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6724 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6725 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6726 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6727 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6728 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6729 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6730 ignore, BUILT_IN_NONE);
6731 if (target)
6732 return target;
6733 break;
6734
6735 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6736 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6737 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6738 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6739 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6740 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6741 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6742 ignore, BUILT_IN_NONE);
6743 if (target)
6744 return target;
6745 break;
6746
6747 case BUILT_IN_ATOMIC_FETCH_OR_1:
6748 case BUILT_IN_ATOMIC_FETCH_OR_2:
6749 case BUILT_IN_ATOMIC_FETCH_OR_4:
6750 case BUILT_IN_ATOMIC_FETCH_OR_8:
6751 case BUILT_IN_ATOMIC_FETCH_OR_16:
6752 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6753 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6754 ignore, BUILT_IN_NONE);
6755 if (target)
6756 return target;
6757 break;
6758
6759 case BUILT_IN_ATOMIC_TEST_AND_SET:
6760 return expand_builtin_atomic_test_and_set (exp, target);
6761
6762 case BUILT_IN_ATOMIC_CLEAR:
6763 return expand_builtin_atomic_clear (exp);
6764
6765 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6766 return expand_builtin_atomic_always_lock_free (exp);
6767
6768 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6769 target = expand_builtin_atomic_is_lock_free (exp);
6770 if (target)
6771 return target;
6772 break;
6773
6774 case BUILT_IN_ATOMIC_THREAD_FENCE:
6775 expand_builtin_atomic_thread_fence (exp);
6776 return const0_rtx;
6777
6778 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6779 expand_builtin_atomic_signal_fence (exp);
6780 return const0_rtx;
6781
6782 case BUILT_IN_OBJECT_SIZE:
6783 return expand_builtin_object_size (exp);
6784
6785 case BUILT_IN_MEMCPY_CHK:
6786 case BUILT_IN_MEMPCPY_CHK:
6787 case BUILT_IN_MEMMOVE_CHK:
6788 case BUILT_IN_MEMSET_CHK:
6789 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6790 if (target)
6791 return target;
6792 break;
6793
6794 case BUILT_IN_STRCPY_CHK:
6795 case BUILT_IN_STPCPY_CHK:
6796 case BUILT_IN_STRNCPY_CHK:
6797 case BUILT_IN_STPNCPY_CHK:
6798 case BUILT_IN_STRCAT_CHK:
6799 case BUILT_IN_STRNCAT_CHK:
6800 case BUILT_IN_SNPRINTF_CHK:
6801 case BUILT_IN_VSNPRINTF_CHK:
6802 maybe_emit_chk_warning (exp, fcode);
6803 break;
6804
6805 case BUILT_IN_SPRINTF_CHK:
6806 case BUILT_IN_VSPRINTF_CHK:
6807 maybe_emit_sprintf_chk_warning (exp, fcode);
6808 break;
6809
6810 case BUILT_IN_FREE:
6811 if (warn_free_nonheap_object)
6812 maybe_emit_free_warning (exp);
6813 break;
6814
6815 case BUILT_IN_THREAD_POINTER:
6816 return expand_builtin_thread_pointer (exp, target);
6817
6818 case BUILT_IN_SET_THREAD_POINTER:
6819 expand_builtin_set_thread_pointer (exp);
6820 return const0_rtx;
6821
6822 case BUILT_IN_CILK_DETACH:
6823 expand_builtin_cilk_detach (exp);
6824 return const0_rtx;
6825
6826 case BUILT_IN_CILK_POP_FRAME:
6827 expand_builtin_cilk_pop_frame (exp);
6828 return const0_rtx;
6829
6830 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
6831 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
6832 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
6833 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
6834 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
6835 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
6836 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
6837 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
6838 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
6839 case BUILT_IN_CHKP_GET_PTR_LBOUND:
6840 case BUILT_IN_CHKP_GET_PTR_UBOUND:
6841 /* We still accept calls to the user CHKP builtins even if the Pointer
6842 Bounds Checker is off. */
6843 if (!chkp_function_instrumented_p (current_function_decl))
6844 {
6845 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
6846 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
6847 || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
6848 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
6849 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
6850 return expand_normal (CALL_EXPR_ARG (exp, 0));
6851 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
6852 return expand_normal (size_zero_node);
6853 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
6854 return expand_normal (size_int (-1));
6855 else
6856 return const0_rtx;
6857 }
6858 /* FALLTHROUGH */
6859
6860 case BUILT_IN_CHKP_BNDMK:
6861 case BUILT_IN_CHKP_BNDSTX:
6862 case BUILT_IN_CHKP_BNDCL:
6863 case BUILT_IN_CHKP_BNDCU:
6864 case BUILT_IN_CHKP_BNDLDX:
6865 case BUILT_IN_CHKP_BNDRET:
6866 case BUILT_IN_CHKP_INTERSECT:
6867 case BUILT_IN_CHKP_NARROW:
6868 case BUILT_IN_CHKP_EXTRACT_LOWER:
6869 case BUILT_IN_CHKP_EXTRACT_UPPER:
6870 /* A software implementation of the Pointer Bounds Checker is not yet
6871 implemented; target support is required. */
6872 error ("Your target platform does not support -fcheck-pointer-bounds");
6873 break;
6874
6875 case BUILT_IN_ACC_ON_DEVICE:
6876 /* Fall back to a library call if we failed to expand the builtin
6877 when folding. */
6878 break;
6879
6880 default: /* Just emit a library call for an unknown builtin. */
6881 break;
6882 }
6883
6884 /* The switch statement above can drop through to cause the function
6885 to be called normally. */
6886 return expand_call (exp, target, ignore);
6887 }
6888
6889 /* Similar to expand_builtin but is used for instrumented calls. */
6890
6891 rtx
6892 expand_builtin_with_bounds (tree exp, rtx target,
6893 rtx subtarget ATTRIBUTE_UNUSED,
6894 machine_mode mode, int ignore)
6895 {
6896 tree fndecl = get_callee_fndecl (exp);
6897 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6898
6899 gcc_assert (CALL_WITH_BOUNDS_P (exp));
6900
6901 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6902 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6903
6904 gcc_assert (fcode > BEGIN_CHKP_BUILTINS
6905 && fcode < END_CHKP_BUILTINS);
6906
6907 switch (fcode)
6908 {
6909 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
6910 target = expand_builtin_memcpy_with_bounds (exp, target);
6911 if (target)
6912 return target;
6913 break;
6914
6915 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
6916 target = expand_builtin_mempcpy_with_bounds (exp, target, mode);
6917 if (target)
6918 return target;
6919 break;
6920
6921 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
6922 target = expand_builtin_memset_with_bounds (exp, target, mode);
6923 if (target)
6924 return target;
6925 break;
6926
6927 default:
6928 break;
6929 }
6930
6931 /* The switch statement above can drop through to cause the function
6932 to be called normally. */
6933 return expand_call (exp, target, ignore);
6934 }
6935
6936 /* Determine whether a tree node represents a call to a built-in
6937 function. If the tree T is a call to a built-in function with
6938 the right number of arguments of the appropriate types, return
6939 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6940 Otherwise the return value is END_BUILTINS. */
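/* Usage sketch (editorial addition, not part of the original sources):
   for a call such as sqrt (2.0) whose argument matches the builtin
   prototype this returns BUILT_IN_SQRT, whereas a call with a mismatched
   argument type, or to a non-builtin or machine-specific function,
   yields END_BUILTINS.  */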
6941
6942 enum built_in_function
6943 builtin_mathfn_code (const_tree t)
6944 {
6945 const_tree fndecl, arg, parmlist;
6946 const_tree argtype, parmtype;
6947 const_call_expr_arg_iterator iter;
6948
6949 if (TREE_CODE (t) != CALL_EXPR
6950 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6951 return END_BUILTINS;
6952
6953 fndecl = get_callee_fndecl (t);
6954 if (fndecl == NULL_TREE
6955 || TREE_CODE (fndecl) != FUNCTION_DECL
6956 || ! DECL_BUILT_IN (fndecl)
6957 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6958 return END_BUILTINS;
6959
6960 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6961 init_const_call_expr_arg_iterator (t, &iter);
6962 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6963 {
6964 /* If a function doesn't take a variable number of arguments,
6965 the last element in the list will have type `void'. */
6966 parmtype = TREE_VALUE (parmlist);
6967 if (VOID_TYPE_P (parmtype))
6968 {
6969 if (more_const_call_expr_args_p (&iter))
6970 return END_BUILTINS;
6971 return DECL_FUNCTION_CODE (fndecl);
6972 }
6973
6974 if (! more_const_call_expr_args_p (&iter))
6975 return END_BUILTINS;
6976
6977 arg = next_const_call_expr_arg (&iter);
6978 argtype = TREE_TYPE (arg);
6979
6980 if (SCALAR_FLOAT_TYPE_P (parmtype))
6981 {
6982 if (! SCALAR_FLOAT_TYPE_P (argtype))
6983 return END_BUILTINS;
6984 }
6985 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6986 {
6987 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6988 return END_BUILTINS;
6989 }
6990 else if (POINTER_TYPE_P (parmtype))
6991 {
6992 if (! POINTER_TYPE_P (argtype))
6993 return END_BUILTINS;
6994 }
6995 else if (INTEGRAL_TYPE_P (parmtype))
6996 {
6997 if (! INTEGRAL_TYPE_P (argtype))
6998 return END_BUILTINS;
6999 }
7000 else
7001 return END_BUILTINS;
7002 }
7003
7004 /* Variable-length argument list. */
7005 return DECL_FUNCTION_CODE (fndecl);
7006 }
7007
7008 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7009 evaluate to a constant. */
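/* Illustrative sketch (editorial addition, not part of the original
   sources): __builtin_constant_p (3) and __builtin_constant_p ("abc")
   fold to 1 here; an argument with side effects, or of pointer or
   aggregate type that is not such a literal, folds to 0; anything else
   is left undecided (NULL_TREE) for later passes.  */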
7010
7011 static tree
7012 fold_builtin_constant_p (tree arg)
7013 {
7014 /* We return 1 for a numeric type that's known to be a constant
7015 value at compile-time or for an aggregate type that's a
7016 literal constant. */
7017 STRIP_NOPS (arg);
7018
7019 /* If we know this is a constant, return the constant 1. */
7020 if (CONSTANT_CLASS_P (arg)
7021 || (TREE_CODE (arg) == CONSTRUCTOR
7022 && TREE_CONSTANT (arg)))
7023 return integer_one_node;
7024 if (TREE_CODE (arg) == ADDR_EXPR)
7025 {
7026 tree op = TREE_OPERAND (arg, 0);
7027 if (TREE_CODE (op) == STRING_CST
7028 || (TREE_CODE (op) == ARRAY_REF
7029 && integer_zerop (TREE_OPERAND (op, 1))
7030 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7031 return integer_one_node;
7032 }
7033
7034 /* If this expression has side effects, show we don't know it to be a
7035 constant. Likewise if it's a pointer or aggregate type, since in
7036 those cases we only want literals, which are only optimized
7037 when generating RTL, not later.
7038 And finally, if we are compiling an initializer, not code, we
7039 need to return a definite result now; there's not going to be any
7040 more optimization done. */
7041 if (TREE_SIDE_EFFECTS (arg)
7042 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7043 || POINTER_TYPE_P (TREE_TYPE (arg))
7044 || cfun == 0
7045 || folding_initializer
7046 || force_folding_builtin_constant_p)
7047 return integer_zero_node;
7048
7049 return NULL_TREE;
7050 }
7051
7052 /* Create a call to builtin_expect with PRED and EXPECTED (and, when
7053 non-null, PREDICTOR) as its arguments and return it as a truthvalue. */
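/* For illustration (editorial addition, not from the original sources):
   for PRED `a > 0' and EXPECTED `1' this builds roughly
     __builtin_expect ((long) (a > 0), 1L) != 0
   i.e. the expect call converted back into a truthvalue.  */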
7054
7055 static tree
7056 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7057 tree predictor)
7058 {
7059 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7060
7061 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7062 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7063 ret_type = TREE_TYPE (TREE_TYPE (fn));
7064 pred_type = TREE_VALUE (arg_types);
7065 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7066
7067 pred = fold_convert_loc (loc, pred_type, pred);
7068 expected = fold_convert_loc (loc, expected_type, expected);
7069 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7070 predictor);
7071
7072 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7073 build_int_cst (ret_type, 0));
7074 }
7075
7076 /* Fold a call to builtin_expect with arguments ARG0, ARG1 and ARG2.
7077 Return NULL_TREE if no simplification is possible. */
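/* Illustrative example (editorial addition, not from the original
   sources): __builtin_expect (a && b, 1) is distributed over the
   short-circuit operator, becoming roughly
     __builtin_expect (a, 1) && __builtin_expect (b, 1)
   with each operand rebuilt via build_builtin_expect_predicate above.  */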
7078
7079 tree
7080 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
7081 {
7082 tree inner, fndecl, inner_arg0;
7083 enum tree_code code;
7084
7085 /* Distribute the expected value over short-circuiting operators.
7086 See through the cast from truthvalue_type_node to long. */
7087 inner_arg0 = arg0;
7088 while (CONVERT_EXPR_P (inner_arg0)
7089 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7090 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7091 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7092
7093 /* If this is a builtin_expect within a builtin_expect, keep the
7094 inner one. See through a comparison against a constant. It
7095 might have been added to create a truthvalue. */
7096 inner = inner_arg0;
7097
7098 if (COMPARISON_CLASS_P (inner)
7099 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7100 inner = TREE_OPERAND (inner, 0);
7101
7102 if (TREE_CODE (inner) == CALL_EXPR
7103 && (fndecl = get_callee_fndecl (inner))
7104 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7105 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7106 return arg0;
7107
7108 inner = inner_arg0;
7109 code = TREE_CODE (inner);
7110 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7111 {
7112 tree op0 = TREE_OPERAND (inner, 0);
7113 tree op1 = TREE_OPERAND (inner, 1);
7114
7115 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7116 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
7117 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7118
7119 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7120 }
7121
7122 /* If the argument isn't invariant then there's nothing else we can do. */
7123 if (!TREE_CONSTANT (inner_arg0))
7124 return NULL_TREE;
7125
7126 /* If we expect that a comparison against the argument will fold to
7127 a constant, return the constant. In practice, this means a true
7128 constant or the address of a non-weak symbol. */
7129 inner = inner_arg0;
7130 STRIP_NOPS (inner);
7131 if (TREE_CODE (inner) == ADDR_EXPR)
7132 {
7133 do
7134 {
7135 inner = TREE_OPERAND (inner, 0);
7136 }
7137 while (TREE_CODE (inner) == COMPONENT_REF
7138 || TREE_CODE (inner) == ARRAY_REF);
7139 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
7140 return NULL_TREE;
7141 }
7142
7143 /* Otherwise, ARG0 already has the proper type for the return value. */
7144 return arg0;
7145 }
7146
7147 /* Fold a call to __builtin_classify_type with argument ARG. */
7148
7149 static tree
7150 fold_builtin_classify_type (tree arg)
7151 {
7152 if (arg == 0)
7153 return build_int_cst (integer_type_node, no_type_class);
7154
7155 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7156 }
7157
7158 /* Fold a call to __builtin_strlen with argument ARG. */
7159
7160 static tree
7161 fold_builtin_strlen (location_t loc, tree type, tree arg)
7162 {
7163 if (!validate_arg (arg, POINTER_TYPE))
7164 return NULL_TREE;
7165 else
7166 {
7167 tree len = c_strlen (arg, 0);
7168
7169 if (len)
7170 return fold_convert_loc (loc, type, len);
7171
7172 return NULL_TREE;
7173 }
7174 }
7175
7176 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7177
7178 static tree
7179 fold_builtin_inf (location_t loc, tree type, int warn)
7180 {
7181 REAL_VALUE_TYPE real;
7182
7183 /* __builtin_inff is intended to be usable to define INFINITY on all
7184 targets. If an infinity is not available, INFINITY expands "to a
7185 positive constant of type float that overflows at translation
7186 time", footnote "In this case, using INFINITY will violate the
7187 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7188 Thus we pedwarn to ensure this constraint violation is
7189 diagnosed. */
7190 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7191 pedwarn (loc, 0, "target format does not support infinity");
7192
7193 real_inf (&real);
7194 return build_real (type, real);
7195 }
7196
7197 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7198 NULL_TREE if no simplification can be made. */
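/* Rough sketch of the transformation (editorial addition, not part of
   the original sources): sincos (x, &s, &c) is rewritten as
     tmp = cexpi (x), s = __imag__ tmp, c = __real__ tmp
   since cexpi (x) computes cos (x) + i * sin (x); for a constant x the
   cexpi call itself is folded to a complex constant.  */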
7199
7200 static tree
7201 fold_builtin_sincos (location_t loc,
7202 tree arg0, tree arg1, tree arg2)
7203 {
7204 tree type;
7205 tree fndecl, call = NULL_TREE;
7206
7207 if (!validate_arg (arg0, REAL_TYPE)
7208 || !validate_arg (arg1, POINTER_TYPE)
7209 || !validate_arg (arg2, POINTER_TYPE))
7210 return NULL_TREE;
7211
7212 type = TREE_TYPE (arg0);
7213
7214 /* Canonicalize sincos to cexpi: look up the cexpi builtin for TYPE. */
7215 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
7216 if (fn == END_BUILTINS)
7217 return NULL_TREE;
7218
7219 /* Calculate the result directly when the argument is a constant. */
7220 if (TREE_CODE (arg0) == REAL_CST)
7221 {
7222 tree complex_type = build_complex_type (type);
7223 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
7224 }
7225 if (!call)
7226 {
7227 if (!targetm.libc_has_function (function_c99_math_complex)
7228 || !builtin_decl_implicit_p (fn))
7229 return NULL_TREE;
7230 fndecl = builtin_decl_explicit (fn);
7231 call = build_call_expr_loc (loc, fndecl, 1, arg0);
7232 call = builtin_save_expr (call);
7233 }
7234
7235 return build2 (COMPOUND_EXPR, void_type_node,
7236 build2 (MODIFY_EXPR, void_type_node,
7237 build_fold_indirect_ref_loc (loc, arg1),
7238 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
7239 build2 (MODIFY_EXPR, void_type_node,
7240 build_fold_indirect_ref_loc (loc, arg2),
7241 fold_build1_loc (loc, REALPART_EXPR, type, call)));
7242 }
7243
7244 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
7245 Return NULL_TREE if no simplification can be made. */
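/* Illustrative cases handled below (editorial addition, not from the
   original sources): memcmp (p, q, 0) folds to 0, memcmp (p, p, n)
   folds to 0, and memcmp (p, q, 1) folds to
     (int) *(const unsigned char *) p - (int) *(const unsigned char *) q
   while everything else is left alone.  */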
7246
7247 static tree
7248 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
7249 {
7250 if (!validate_arg (arg1, POINTER_TYPE)
7251 || !validate_arg (arg2, POINTER_TYPE)
7252 || !validate_arg (len, INTEGER_TYPE))
7253 return NULL_TREE;
7254
7255 /* If the LEN parameter is zero, return zero. */
7256 if (integer_zerop (len))
7257 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
7258 arg1, arg2);
7259
7260 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7261 if (operand_equal_p (arg1, arg2, 0))
7262 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
7263
7264 /* If the LEN parameter is one, return an expression corresponding to
7265 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
7266 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
7267 {
7268 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7269 tree cst_uchar_ptr_node
7270 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7271
7272 tree ind1
7273 = fold_convert_loc (loc, integer_type_node,
7274 build1 (INDIRECT_REF, cst_uchar_node,
7275 fold_convert_loc (loc,
7276 cst_uchar_ptr_node,
7277 arg1)));
7278 tree ind2
7279 = fold_convert_loc (loc, integer_type_node,
7280 build1 (INDIRECT_REF, cst_uchar_node,
7281 fold_convert_loc (loc,
7282 cst_uchar_ptr_node,
7283 arg2)));
7284 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
7285 }
7286
7287 return NULL_TREE;
7288 }
7289
7290 /* Fold a call to builtin isascii with argument ARG. */
7291
7292 static tree
7293 fold_builtin_isascii (location_t loc, tree arg)
7294 {
7295 if (!validate_arg (arg, INTEGER_TYPE))
7296 return NULL_TREE;
7297 else
7298 {
7299 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
7300 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
7301 build_int_cst (integer_type_node,
7302 ~ (unsigned HOST_WIDE_INT) 0x7f));
7303 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
7304 arg, integer_zero_node);
7305 }
7306 }
7307
7308 /* Fold a call to builtin toascii with argument ARG. */
7309
7310 static tree
7311 fold_builtin_toascii (location_t loc, tree arg)
7312 {
7313 if (!validate_arg (arg, INTEGER_TYPE))
7314 return NULL_TREE;
7315
7316 /* Transform toascii(c) -> (c & 0x7f). */
7317 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
7318 build_int_cst (integer_type_node, 0x7f));
7319 }
7320
7321 /* Fold a call to builtin isdigit with argument ARG. */
7322
7323 static tree
7324 fold_builtin_isdigit (location_t loc, tree arg)
7325 {
7326 if (!validate_arg (arg, INTEGER_TYPE))
7327 return NULL_TREE;
7328 else
7329 {
7330 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
7331 /* According to the C standard, isdigit is unaffected by locale.
7332 However, it definitely is affected by the target character set. */
7333 unsigned HOST_WIDE_INT target_digit0
7334 = lang_hooks.to_target_charset ('0');
7335
7336 if (target_digit0 == 0)
7337 return NULL_TREE;
7338
7339 arg = fold_convert_loc (loc, unsigned_type_node, arg);
7340 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
7341 build_int_cst (unsigned_type_node, target_digit0));
7342 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
7343 build_int_cst (unsigned_type_node, 9));
7344 }
7345 }
7346
7347 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
7348
7349 static tree
7350 fold_builtin_fabs (location_t loc, tree arg, tree type)
7351 {
7352 if (!validate_arg (arg, REAL_TYPE))
7353 return NULL_TREE;
7354
7355 arg = fold_convert_loc (loc, type, arg);
7356 return fold_build1_loc (loc, ABS_EXPR, type, arg);
7357 }
7358
7359 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
7360
7361 static tree
7362 fold_builtin_abs (location_t loc, tree arg, tree type)
7363 {
7364 if (!validate_arg (arg, INTEGER_TYPE))
7365 return NULL_TREE;
7366
7367 arg = fold_convert_loc (loc, type, arg);
7368 return fold_build1_loc (loc, ABS_EXPR, type, arg);
7369 }
7370
7371 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
7372
7373 static tree
7374 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
7375 {
7376 /* ??? Only expand to FMA_EXPR if it's directly supported. */
7377 if (validate_arg (arg0, REAL_TYPE)
7378 && validate_arg (arg1, REAL_TYPE)
7379 && validate_arg (arg2, REAL_TYPE)
7380 && optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
7381 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
7382
7383 return NULL_TREE;
7384 }
7385
7386 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
7387
7388 static tree
7389 fold_builtin_carg (location_t loc, tree arg, tree type)
7390 {
7391 if (validate_arg (arg, COMPLEX_TYPE)
7392 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7393 {
7394 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
7395
7396 if (atan2_fn)
7397 {
7398 tree new_arg = builtin_save_expr (arg);
7399 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
7400 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
7401 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
7402 }
7403 }
7404
7405 return NULL_TREE;
7406 }
7407
7408 /* Fold a call to builtin frexp. We can assume the base is 2. */
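/* Worked example (editorial addition, not from the original sources):
   for a constant argument, frexp (4.0, &e) folds to (*e = 3, 0.5),
   because 4.0 == 0.5 * 2**3 with the significand in [0.5, 1.0).  */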
7409
7410 static tree
7411 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
7412 {
7413 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
7414 return NULL_TREE;
7415
7416 STRIP_NOPS (arg0);
7417
7418 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
7419 return NULL_TREE;
7420
7421 arg1 = build_fold_indirect_ref_loc (loc, arg1);
7422
7423 /* Proceed if a valid pointer type was passed in. */
7424 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
7425 {
7426 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
7427 tree frac, exp;
7428
7429 switch (value->cl)
7430 {
7431 case rvc_zero:
7432 /* For +-0, return (*exp = 0, +-0). */
7433 exp = integer_zero_node;
7434 frac = arg0;
7435 break;
7436 case rvc_nan:
7437 case rvc_inf:
7438 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
7439 return omit_one_operand_loc (loc, rettype, arg0, arg1);
7440 case rvc_normal:
7441 {
7442 /* Since the frexp function always expects base 2, and in
7443 GCC normalized significands are already in the range
7444 [0.5, 1.0), we have exactly what frexp wants. */
7445 REAL_VALUE_TYPE frac_rvt = *value;
7446 SET_REAL_EXP (&frac_rvt, 0);
7447 frac = build_real (rettype, frac_rvt);
7448 exp = build_int_cst (integer_type_node, REAL_EXP (value));
7449 }
7450 break;
7451 default:
7452 gcc_unreachable ();
7453 }
7454
7455 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
7456 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
7457 TREE_SIDE_EFFECTS (arg1) = 1;
7458 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
7459 }
7460
7461 return NULL_TREE;
7462 }
7463
7464 /* Fold a call to builtin modf. */
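/* Worked example (editorial addition, not from the original sources):
   for a constant argument, modf (3.25, &ip) folds to (*ip = 3.0, 0.25),
   and modf (-inf, &ip) folds to (*ip = -inf, -0.0).  */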
7465
7466 static tree
7467 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
7468 {
7469 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
7470 return NULL_TREE;
7471
7472 STRIP_NOPS (arg0);
7473
7474 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
7475 return NULL_TREE;
7476
7477 arg1 = build_fold_indirect_ref_loc (loc, arg1);
7478
7479 /* Proceed if a valid pointer type was passed in. */
7480 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
7481 {
7482 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
7483 REAL_VALUE_TYPE trunc, frac;
7484
7485 switch (value->cl)
7486 {
7487 case rvc_nan:
7488 case rvc_zero:
7489 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
7490 trunc = frac = *value;
7491 break;
7492 case rvc_inf:
7493 /* For +-Inf, return (*arg1 = arg0, +-0). */
7494 frac = dconst0;
7495 frac.sign = value->sign;
7496 trunc = *value;
7497 break;
7498 case rvc_normal:
7499 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
7500 real_trunc (&trunc, VOIDmode, value);
7501 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
7502 /* If the original number was negative and already
7503 integral, then the fractional part is -0.0. */
7504 if (value->sign && frac.cl == rvc_zero)
7505 frac.sign = value->sign;
7506 break;
7507 }
7508
7509 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
7510 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
7511 build_real (rettype, trunc));
7512 TREE_SIDE_EFFECTS (arg1) = 1;
7513 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
7514 build_real (rettype, frac));
7515 }
7516
7517 return NULL_TREE;
7518 }
7519
7520 /* Given a location LOC, an interclass builtin function decl FNDECL
7521 and its single argument ARG, return a folded expression computing
7522 the same, or NULL_TREE if we either couldn't or didn't want to fold
7523 (the latter happens if there's an RTL instruction available). */
7524
7525 static tree
7526 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
7527 {
7528 machine_mode mode;
7529
7530 if (!validate_arg (arg, REAL_TYPE))
7531 return NULL_TREE;
7532
7533 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
7534 return NULL_TREE;
7535
7536 mode = TYPE_MODE (TREE_TYPE (arg));
7537
7538 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
7539
7540 /* If there is no optab, try generic code. */
7541 switch (DECL_FUNCTION_CODE (fndecl))
7542 {
7543 tree result;
7544
7545 CASE_FLT_FN (BUILT_IN_ISINF):
7546 {
7547 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
7548 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
7549 tree type = TREE_TYPE (arg);
7550 REAL_VALUE_TYPE r;
7551 char buf[128];
7552
7553 if (is_ibm_extended)
7554 {
7555 /* NaN and Inf are encoded in the high-order double value
7556 only. The low-order value is not significant. */
7557 type = double_type_node;
7558 mode = DFmode;
7559 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
7560 }
7561 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
7562 real_from_string (&r, buf);
7563 result = build_call_expr (isgr_fn, 2,
7564 fold_build1_loc (loc, ABS_EXPR, type, arg),
7565 build_real (type, r));
7566 return result;
7567 }
7568 CASE_FLT_FN (BUILT_IN_FINITE):
7569 case BUILT_IN_ISFINITE:
7570 {
7571 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
7572 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
7573 tree type = TREE_TYPE (arg);
7574 REAL_VALUE_TYPE r;
7575 char buf[128];
7576
7577 if (is_ibm_extended)
7578 {
7579 /* NaN and Inf are encoded in the high-order double value
7580 only. The low-order value is not significant. */
7581 type = double_type_node;
7582 mode = DFmode;
7583 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
7584 }
7585 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
7586 real_from_string (&r, buf);
7587 result = build_call_expr (isle_fn, 2,
7588 fold_build1_loc (loc, ABS_EXPR, type, arg),
7589 build_real (type, r));
7590 /*result = fold_build2_loc (loc, UNGT_EXPR,
7591 TREE_TYPE (TREE_TYPE (fndecl)),
7592 fold_build1_loc (loc, ABS_EXPR, type, arg),
7593 build_real (type, r));
7594 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
7595 TREE_TYPE (TREE_TYPE (fndecl)),
7596 result);*/
7597 return result;
7598 }
7599 case BUILT_IN_ISNORMAL:
7600 {
7601 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
7602 islessequal(fabs(x),DBL_MAX). */
7603 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
7604 tree type = TREE_TYPE (arg);
7605 tree orig_arg, max_exp, min_exp;
7606 machine_mode orig_mode = mode;
7607 REAL_VALUE_TYPE rmax, rmin;
7608 char buf[128];
7609
7610 orig_arg = arg = builtin_save_expr (arg);
7611 if (is_ibm_extended)
7612 {
7613 /* Use double to test the normal range of IBM extended
7614 precision. Emin for IBM extended precision is
7615 different to emin for IEEE double, being 53 higher
7616 since the low double exponent is at least 53 lower
7617 than the high double exponent. */
7618 type = double_type_node;
7619 mode = DFmode;
7620 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
7621 }
7622 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
7623
7624 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
7625 real_from_string (&rmax, buf);
7626 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
7627 real_from_string (&rmin, buf);
7628 max_exp = build_real (type, rmax);
7629 min_exp = build_real (type, rmin);
7630
7631 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
7632 if (is_ibm_extended)
7633 {
7634 /* Testing the high end of the range is done just using
7635 the high double, using the same test as isfinite().
7636 For the subnormal end of the range we first test the
7637 high double, then if its magnitude is equal to the
7638 limit of 0x1p-969, we test whether the low double is
7639 non-zero and opposite sign to the high double. */
7640 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
7641 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
7642 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
7643 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
7644 arg, min_exp);
7645 tree as_complex = build1 (VIEW_CONVERT_EXPR,
7646 complex_double_type_node, orig_arg);
7647 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
7648 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
7649 tree zero = build_real (type, dconst0);
7650 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
7651 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
7652 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
7653 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
7654 fold_build3 (COND_EXPR,
7655 integer_type_node,
7656 hilt, logt, lolt));
7657 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
7658 eq_min, ok_lo);
7659 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
7660 gt_min, eq_min);
7661 }
7662 else
7663 {
7664 tree const isge_fn
7665 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
7666 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
7667 }
7668 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
7669 max_exp, min_exp);
7670 return result;
7671 }
7672 default:
7673 break;
7674 }
7675
7676 return NULL_TREE;
7677 }
7678
7679 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
7680 ARG is the argument for the call. */
7681
7682 static tree
7683 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
7684 {
7685 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7686
7687 if (!validate_arg (arg, REAL_TYPE))
7688 return NULL_TREE;
7689
7690 switch (builtin_index)
7691 {
7692 case BUILT_IN_ISINF:
7693 if (!HONOR_INFINITIES (arg))
7694 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
7695
7696 return NULL_TREE;
7697
7698 case BUILT_IN_ISINF_SIGN:
7699 {
7700 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
7701 /* In a boolean context, GCC will fold the inner COND_EXPR to
7702 1. So e.g. "if (isinf_sign(x))" would be folded to just
7703 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
7704 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
7705 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
7706 tree tmp = NULL_TREE;
7707
7708 arg = builtin_save_expr (arg);
7709
7710 if (signbit_fn && isinf_fn)
7711 {
7712 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
7713 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
7714
7715 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
7716 signbit_call, integer_zero_node);
7717 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
7718 isinf_call, integer_zero_node);
7719
7720 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
7721 integer_minus_one_node, integer_one_node);
7722 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
7723 isinf_call, tmp,
7724 integer_zero_node);
7725 }
7726
7727 return tmp;
7728 }
7729
7730 case BUILT_IN_ISFINITE:
7731 if (!HONOR_NANS (arg)
7732 && !HONOR_INFINITIES (arg))
7733 return omit_one_operand_loc (loc, type, integer_one_node, arg);
7734
7735 return NULL_TREE;
7736
7737 case BUILT_IN_ISNAN:
7738 if (!HONOR_NANS (arg))
7739 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
7740
7741 {
7742 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
7743 if (is_ibm_extended)
7744 {
7745 /* NaN and Inf are encoded in the high-order double value
7746 only. The low-order value is not significant. */
7747 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
7748 }
7749 }
7750 arg = builtin_save_expr (arg);
7751 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
7752
7753 default:
7754 gcc_unreachable ();
7755 }
7756 }
7757
7758 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
7759 This builtin will generate code to return the appropriate floating
7760 point classification depending on the value of the floating point
7761 number passed in. The possible return values must be supplied as
7762 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
7763 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
7764 one floating point argument which is "type generic". */
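/* Concrete sketch (editorial addition, not part of the original
   sources): for IEEE double the emitted test chain is roughly
     x != x ? FP_NAN
     : fabs (x) == __builtin_inf () ? FP_INFINITE
     : fabs (x) >= 0x1p-1022 ? FP_NORMAL
     : fabs (x) == 0.0 ? FP_ZERO : FP_SUBNORMAL
   where 0x1p-1022 is DBL_MIN, i.e. 2**(emin - 1) for the mode.  */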
7765
7766 static tree
7767 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
7768 {
7769 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
7770 arg, type, res, tmp;
7771 machine_mode mode;
7772 REAL_VALUE_TYPE r;
7773 char buf[128];
7774
7775 /* Verify the required arguments in the original call. */
7776 if (nargs != 6
7777 || !validate_arg (args[0], INTEGER_TYPE)
7778 || !validate_arg (args[1], INTEGER_TYPE)
7779 || !validate_arg (args[2], INTEGER_TYPE)
7780 || !validate_arg (args[3], INTEGER_TYPE)
7781 || !validate_arg (args[4], INTEGER_TYPE)
7782 || !validate_arg (args[5], REAL_TYPE))
7783 return NULL_TREE;
7784
7785 fp_nan = args[0];
7786 fp_infinite = args[1];
7787 fp_normal = args[2];
7788 fp_subnormal = args[3];
7789 fp_zero = args[4];
7790 arg = args[5];
7791 type = TREE_TYPE (arg);
7792 mode = TYPE_MODE (type);
7793 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
7794
7795 /* fpclassify(x) ->
7796 isnan(x) ? FP_NAN :
7797 (fabs(x) == Inf ? FP_INFINITE :
7798 (fabs(x) >= DBL_MIN ? FP_NORMAL :
7799 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
7800
7801 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
7802 build_real (type, dconst0));
7803 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
7804 tmp, fp_zero, fp_subnormal);
7805
7806 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
7807 real_from_string (&r, buf);
7808 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
7809 arg, build_real (type, r));
7810 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
7811
7812 if (HONOR_INFINITIES (mode))
7813 {
7814 real_inf (&r);
7815 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
7816 build_real (type, r));
7817 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
7818 fp_infinite, res);
7819 }
7820
7821 if (HONOR_NANS (mode))
7822 {
7823 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
7824 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
7825 }
7826
7827 return res;
7828 }
7829
7830 /* Fold a call to an unordered comparison function such as
7831 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
7832 being called and ARG0 and ARG1 are the arguments for the call.
7833 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
7834 the opposite of the desired result. UNORDERED_CODE is used
7835 for modes that can hold NaNs and ORDERED_CODE is used for
7836 the rest. */
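/* Illustrative expansion (editorial addition, not from the original
   sources): isgreater (x, y) becomes the negation of an UNLE_EXPR
   (x less-than-or-equal-to y, or unordered) when NaNs are honored, and
   the negation of a plain LE_EXPR otherwise, so the comparison stays
   quiet on NaN operands unlike a raw x > y.  */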
7837
7838 static tree
7839 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
7840 enum tree_code unordered_code,
7841 enum tree_code ordered_code)
7842 {
7843 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7844 enum tree_code code;
7845 tree type0, type1;
7846 enum tree_code code0, code1;
7847 tree cmp_type = NULL_TREE;
7848
7849 type0 = TREE_TYPE (arg0);
7850 type1 = TREE_TYPE (arg1);
7851
7852 code0 = TREE_CODE (type0);
7853 code1 = TREE_CODE (type1);
7854
7855 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
7856 /* Choose the wider of two real types. */
7857 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
7858 ? type0 : type1;
7859 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
7860 cmp_type = type0;
7861 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
7862 cmp_type = type1;
7863
7864 arg0 = fold_convert_loc (loc, cmp_type, arg0);
7865 arg1 = fold_convert_loc (loc, cmp_type, arg1);
7866
7867 if (unordered_code == UNORDERED_EXPR)
7868 {
7869 if (!HONOR_NANS (arg0))
7870 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
7871 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
7872 }
7873
7874 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
7875 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
7876 fold_build2_loc (loc, code, type, arg0, arg1));
7877 }
7878
7879 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
7880 arithmetic if it can never overflow, or into internal functions that
7881 return both the result of the arithmetic and an overflow flag in
7882 a complex integer result, or into some other check for overflow.
7883 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
7884 checking part of that. */
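/* Rough sketch of the result (editorial addition, not from the original
   sources): __builtin_add_overflow (a, b, res) becomes approximately
     c = IFN_ADD_OVERFLOW (a, b); *res = __real__ c; (bool) __imag__ c
   while __builtin_add_overflow_p (a, b, (int) 0) keeps only the
   (bool) __imag__ c part, and folds to a constant when both A and B
   are integer constants.  */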
7885
7886 static tree
7887 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
7888 tree arg0, tree arg1, tree arg2)
7889 {
7890 enum internal_fn ifn = IFN_LAST;
7891 /* The code of the expression corresponding to the type-generic
7892 built-in, or ERROR_MARK for the type-specific ones. */
7893 enum tree_code opcode = ERROR_MARK;
7894 bool ovf_only = false;
7895
7896 switch (fcode)
7897 {
7898 case BUILT_IN_ADD_OVERFLOW_P:
7899 ovf_only = true;
7900 /* FALLTHRU */
7901 case BUILT_IN_ADD_OVERFLOW:
7902 opcode = PLUS_EXPR;
7903 /* FALLTHRU */
7904 case BUILT_IN_SADD_OVERFLOW:
7905 case BUILT_IN_SADDL_OVERFLOW:
7906 case BUILT_IN_SADDLL_OVERFLOW:
7907 case BUILT_IN_UADD_OVERFLOW:
7908 case BUILT_IN_UADDL_OVERFLOW:
7909 case BUILT_IN_UADDLL_OVERFLOW:
7910 ifn = IFN_ADD_OVERFLOW;
7911 break;
7912 case BUILT_IN_SUB_OVERFLOW_P:
7913 ovf_only = true;
7914 /* FALLTHRU */
7915 case BUILT_IN_SUB_OVERFLOW:
7916 opcode = MINUS_EXPR;
7917 /* FALLTHRU */
7918 case BUILT_IN_SSUB_OVERFLOW:
7919 case BUILT_IN_SSUBL_OVERFLOW:
7920 case BUILT_IN_SSUBLL_OVERFLOW:
7921 case BUILT_IN_USUB_OVERFLOW:
7922 case BUILT_IN_USUBL_OVERFLOW:
7923 case BUILT_IN_USUBLL_OVERFLOW:
7924 ifn = IFN_SUB_OVERFLOW;
7925 break;
7926 case BUILT_IN_MUL_OVERFLOW_P:
7927 ovf_only = true;
7928 /* FALLTHRU */
7929 case BUILT_IN_MUL_OVERFLOW:
7930 opcode = MULT_EXPR;
7931 /* FALLTHRU */
7932 case BUILT_IN_SMUL_OVERFLOW:
7933 case BUILT_IN_SMULL_OVERFLOW:
7934 case BUILT_IN_SMULLL_OVERFLOW:
7935 case BUILT_IN_UMUL_OVERFLOW:
7936 case BUILT_IN_UMULL_OVERFLOW:
7937 case BUILT_IN_UMULLL_OVERFLOW:
7938 ifn = IFN_MUL_OVERFLOW;
7939 break;
7940 default:
7941 gcc_unreachable ();
7942 }
7943
7944 /* For the "generic" overloads, the first two arguments can have different
7945 types and the last argument determines the target type to use to check
7946 for overflow. The arguments of the other overloads all have the same
7947 type. */
7948 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
7949
7950 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
7951 arguments are constant, attempt to fold the built-in call into a constant
7952 expression indicating whether or not it detected an overflow. */
7953 if (ovf_only
7954 && TREE_CODE (arg0) == INTEGER_CST
7955 && TREE_CODE (arg1) == INTEGER_CST)
7956 /* Perform the computation in the target type and check for overflow. */
7957 return omit_one_operand_loc (loc, boolean_type_node,
7958 arith_overflowed_p (opcode, type, arg0, arg1)
7959 ? boolean_true_node : boolean_false_node,
7960 arg2);
7961
7962 tree ctype = build_complex_type (type);
7963 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
7964 2, arg0, arg1);
7965 tree tgt = save_expr (call);
7966 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
7967 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
7968 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
7969
7970 if (ovf_only)
7971 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
7972
7973 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
7974 tree store
7975 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
7976 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
7977 }
7978
7979 /* Fold a call to __builtin_FILE to a constant string. */
7980
7981 static inline tree
7982 fold_builtin_FILE (location_t loc)
7983 {
7984 if (const char *fname = LOCATION_FILE (loc))
7985 return build_string_literal (strlen (fname) + 1, fname);
7986
7987 return build_string_literal (1, "");
7988 }
7989
7990 /* Fold a call to __builtin_FUNCTION to a constant string. */
7991
7992 static inline tree
7993 fold_builtin_FUNCTION ()
7994 {
7995 if (current_function_decl)
7996 {
7997 const char *name = IDENTIFIER_POINTER (DECL_NAME (current_function_decl));
7998 return build_string_literal (strlen (name) + 1, name);
7999 }
8000
8001 return build_string_literal (1, "");
8002 }
8003
8004 /* Fold a call to __builtin_LINE to an integer constant. */
8005
8006 static inline tree
8007 fold_builtin_LINE (location_t loc, tree type)
8008 {
8009 return build_int_cst (type, LOCATION_LINE (loc));
8010 }
8011
8012 /* Fold a call to built-in function FNDECL with 0 arguments.
8013 This function returns NULL_TREE if no simplification was possible. */
8014
8015 static tree
8016 fold_builtin_0 (location_t loc, tree fndecl)
8017 {
8018 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8019 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8020 switch (fcode)
8021 {
8022 case BUILT_IN_FILE:
8023 return fold_builtin_FILE (loc);
8024
8025 case BUILT_IN_FUNCTION:
8026 return fold_builtin_FUNCTION ();
8027
8028 case BUILT_IN_LINE:
8029 return fold_builtin_LINE (loc, type);
8030
8031 CASE_FLT_FN (BUILT_IN_INF):
8032 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
8033 case BUILT_IN_INFD32:
8034 case BUILT_IN_INFD64:
8035 case BUILT_IN_INFD128:
8036 return fold_builtin_inf (loc, type, true);
8037
8038 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
8039 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
8040 return fold_builtin_inf (loc, type, false);
8041
8042 case BUILT_IN_CLASSIFY_TYPE:
8043 return fold_builtin_classify_type (NULL_TREE);
8044
8045 default:
8046 break;
8047 }
8048 return NULL_TREE;
8049 }
8050
8051 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
8052 This function returns NULL_TREE if no simplification was possible. */
8053
8054 static tree
8055 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
8056 {
8057 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8058 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8059
8060 if (TREE_CODE (arg0) == ERROR_MARK)
8061 return NULL_TREE;
8062
8063 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
8064 return ret;
8065
8066 switch (fcode)
8067 {
8068 case BUILT_IN_CONSTANT_P:
8069 {
8070 tree val = fold_builtin_constant_p (arg0);
8071
8072 /* Gimplification will pull the CALL_EXPR for the builtin out of
8073 an if condition. When not optimizing, we'll not CSE it back.
8074 To avoid regressions such as link errors, return false now. */
8075 if (!val && !optimize)
8076 val = integer_zero_node;
8077
8078 return val;
8079 }
8080
8081 case BUILT_IN_CLASSIFY_TYPE:
8082 return fold_builtin_classify_type (arg0);
8083
8084 case BUILT_IN_STRLEN:
8085 return fold_builtin_strlen (loc, type, arg0);
8086
8087 CASE_FLT_FN (BUILT_IN_FABS):
8088 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
8089 case BUILT_IN_FABSD32:
8090 case BUILT_IN_FABSD64:
8091 case BUILT_IN_FABSD128:
8092 return fold_builtin_fabs (loc, arg0, type);
8093
8094 case BUILT_IN_ABS:
8095 case BUILT_IN_LABS:
8096 case BUILT_IN_LLABS:
8097 case BUILT_IN_IMAXABS:
8098 return fold_builtin_abs (loc, arg0, type);
8099
8100 CASE_FLT_FN (BUILT_IN_CONJ):
8101 if (validate_arg (arg0, COMPLEX_TYPE)
8102 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8103 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
8104 break;
8105
8106 CASE_FLT_FN (BUILT_IN_CREAL):
8107 if (validate_arg (arg0, COMPLEX_TYPE)
8108 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8109 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
8110 break;
8111
8112 CASE_FLT_FN (BUILT_IN_CIMAG):
8113 if (validate_arg (arg0, COMPLEX_TYPE)
8114 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8115 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
8116 break;
8117
8118 CASE_FLT_FN (BUILT_IN_CARG):
8119 return fold_builtin_carg (loc, arg0, type);
8120
8121 case BUILT_IN_ISASCII:
8122 return fold_builtin_isascii (loc, arg0);
8123
8124 case BUILT_IN_TOASCII:
8125 return fold_builtin_toascii (loc, arg0);
8126
8127 case BUILT_IN_ISDIGIT:
8128 return fold_builtin_isdigit (loc, arg0);
8129
8130 CASE_FLT_FN (BUILT_IN_FINITE):
8131 case BUILT_IN_FINITED32:
8132 case BUILT_IN_FINITED64:
8133 case BUILT_IN_FINITED128:
8134 case BUILT_IN_ISFINITE:
8135 {
8136 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
8137 if (ret)
8138 return ret;
8139 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8140 }
8141
8142 CASE_FLT_FN (BUILT_IN_ISINF):
8143 case BUILT_IN_ISINFD32:
8144 case BUILT_IN_ISINFD64:
8145 case BUILT_IN_ISINFD128:
8146 {
8147 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
8148 if (ret)
8149 return ret;
8150 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8151 }
8152
8153 case BUILT_IN_ISNORMAL:
8154 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8155
8156 case BUILT_IN_ISINF_SIGN:
8157 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
8158
8159 CASE_FLT_FN (BUILT_IN_ISNAN):
8160 case BUILT_IN_ISNAND32:
8161 case BUILT_IN_ISNAND64:
8162 case BUILT_IN_ISNAND128:
8163 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
8164
8165 case BUILT_IN_FREE:
8166 if (integer_zerop (arg0))
8167 return build_empty_stmt (loc);
8168 break;
8169
8170 default:
8171 break;
8172 }
8173
8174 return NULL_TREE;
8175
8176 }
8177
8178 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
8179 This function returns NULL_TREE if no simplification was possible. */
8180
8181 static tree
8182 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
8183 {
8184 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8185 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8186
8187 if (TREE_CODE (arg0) == ERROR_MARK
8188 || TREE_CODE (arg1) == ERROR_MARK)
8189 return NULL_TREE;
8190
8191 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
8192 return ret;
8193
8194 switch (fcode)
8195 {
8196 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
8197 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
8198 if (validate_arg (arg0, REAL_TYPE)
8199 && validate_arg (arg1, POINTER_TYPE))
8200 return do_mpfr_lgamma_r (arg0, arg1, type);
8201 break;
8202
8203 CASE_FLT_FN (BUILT_IN_FREXP):
8204 return fold_builtin_frexp (loc, arg0, arg1, type);
8205
8206 CASE_FLT_FN (BUILT_IN_MODF):
8207 return fold_builtin_modf (loc, arg0, arg1, type);
8208
8209 case BUILT_IN_STRSTR:
8210 return fold_builtin_strstr (loc, arg0, arg1, type);
8211
8212 case BUILT_IN_STRSPN:
8213 return fold_builtin_strspn (loc, arg0, arg1);
8214
8215 case BUILT_IN_STRCSPN:
8216 return fold_builtin_strcspn (loc, arg0, arg1);
8217
8218 case BUILT_IN_STRPBRK:
8219 return fold_builtin_strpbrk (loc, arg0, arg1, type);
8220
8221 case BUILT_IN_EXPECT:
8222 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
8223
8224 case BUILT_IN_ISGREATER:
8225 return fold_builtin_unordered_cmp (loc, fndecl,
8226 arg0, arg1, UNLE_EXPR, LE_EXPR);
8227 case BUILT_IN_ISGREATEREQUAL:
8228 return fold_builtin_unordered_cmp (loc, fndecl,
8229 arg0, arg1, UNLT_EXPR, LT_EXPR);
8230 case BUILT_IN_ISLESS:
8231 return fold_builtin_unordered_cmp (loc, fndecl,
8232 arg0, arg1, UNGE_EXPR, GE_EXPR);
8233 case BUILT_IN_ISLESSEQUAL:
8234 return fold_builtin_unordered_cmp (loc, fndecl,
8235 arg0, arg1, UNGT_EXPR, GT_EXPR);
8236 case BUILT_IN_ISLESSGREATER:
8237 return fold_builtin_unordered_cmp (loc, fndecl,
8238 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
8239 case BUILT_IN_ISUNORDERED:
8240 return fold_builtin_unordered_cmp (loc, fndecl,
8241 arg0, arg1, UNORDERED_EXPR,
8242 NOP_EXPR);
8243
8244 /* We do the folding for va_start in the expander. */
8245 case BUILT_IN_VA_START:
8246 break;
8247
8248 case BUILT_IN_OBJECT_SIZE:
8249 return fold_builtin_object_size (arg0, arg1);
8250
8251 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
8252 return fold_builtin_atomic_always_lock_free (arg0, arg1);
8253
8254 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
8255 return fold_builtin_atomic_is_lock_free (arg0, arg1);
8256
8257 default:
8258 break;
8259 }
8260 return NULL_TREE;
8261 }
8262
8263 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
8264 and ARG2.
8265 This function returns NULL_TREE if no simplification was possible. */
8266
8267 static tree
8268 fold_builtin_3 (location_t loc, tree fndecl,
8269 tree arg0, tree arg1, tree arg2)
8270 {
8271 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8272 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8273
8274 if (TREE_CODE (arg0) == ERROR_MARK
8275 || TREE_CODE (arg1) == ERROR_MARK
8276 || TREE_CODE (arg2) == ERROR_MARK)
8277 return NULL_TREE;
8278
8279 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
8280 arg0, arg1, arg2))
8281 return ret;
8282
8283 switch (fcode)
8284 {
8285
8286 CASE_FLT_FN (BUILT_IN_SINCOS):
8287 return fold_builtin_sincos (loc, arg0, arg1, arg2);
8288
8289 CASE_FLT_FN (BUILT_IN_FMA):
8290 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
8291
8292 CASE_FLT_FN (BUILT_IN_REMQUO):
8293 if (validate_arg (arg0, REAL_TYPE)
8294 && validate_arg (arg1, REAL_TYPE)
8295 && validate_arg (arg2, POINTER_TYPE))
8296 return do_mpfr_remquo (arg0, arg1, arg2);
8297 break;
8298
8299 case BUILT_IN_BCMP:
8300 case BUILT_IN_MEMCMP:
8301 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
8302
8303 case BUILT_IN_EXPECT:
8304 return fold_builtin_expect (loc, arg0, arg1, arg2);
8305
8306 case BUILT_IN_ADD_OVERFLOW:
8307 case BUILT_IN_SUB_OVERFLOW:
8308 case BUILT_IN_MUL_OVERFLOW:
8309 case BUILT_IN_ADD_OVERFLOW_P:
8310 case BUILT_IN_SUB_OVERFLOW_P:
8311 case BUILT_IN_MUL_OVERFLOW_P:
8312 case BUILT_IN_SADD_OVERFLOW:
8313 case BUILT_IN_SADDL_OVERFLOW:
8314 case BUILT_IN_SADDLL_OVERFLOW:
8315 case BUILT_IN_SSUB_OVERFLOW:
8316 case BUILT_IN_SSUBL_OVERFLOW:
8317 case BUILT_IN_SSUBLL_OVERFLOW:
8318 case BUILT_IN_SMUL_OVERFLOW:
8319 case BUILT_IN_SMULL_OVERFLOW:
8320 case BUILT_IN_SMULLL_OVERFLOW:
8321 case BUILT_IN_UADD_OVERFLOW:
8322 case BUILT_IN_UADDL_OVERFLOW:
8323 case BUILT_IN_UADDLL_OVERFLOW:
8324 case BUILT_IN_USUB_OVERFLOW:
8325 case BUILT_IN_USUBL_OVERFLOW:
8326 case BUILT_IN_USUBLL_OVERFLOW:
8327 case BUILT_IN_UMUL_OVERFLOW:
8328 case BUILT_IN_UMULL_OVERFLOW:
8329 case BUILT_IN_UMULLL_OVERFLOW:
8330 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
8331
8332 default:
8333 break;
8334 }
8335 return NULL_TREE;
8336 }
8337
8338 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
8339 arguments. IGNORE is true if the result of the
8340 function call is ignored. This function returns NULL_TREE if no
8341 simplification was possible. */
8342
8343 tree
8344 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
8345 {
8346 tree ret = NULL_TREE;
8347
8348 switch (nargs)
8349 {
8350 case 0:
8351 ret = fold_builtin_0 (loc, fndecl);
8352 break;
8353 case 1:
8354 ret = fold_builtin_1 (loc, fndecl, args[0]);
8355 break;
8356 case 2:
8357 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
8358 break;
8359 case 3:
8360 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
8361 break;
8362 default:
8363 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
8364 break;
8365 }
8366 if (ret)
8367 {
8368 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
8369 SET_EXPR_LOCATION (ret, loc);
8370 TREE_NO_WARNING (ret) = 1;
8371 return ret;
8372 }
8373 return NULL_TREE;
8374 }
8375
8376 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
8377 list ARGS along with N new arguments in NEWARGS. SKIP is the number
8378 of arguments in ARGS to be omitted. OLDNARGS is the number of
8379 elements in ARGS. */
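/* Sketch of the argument layout (editorial addition, not from the
   original sources): with OLDNARGS == 4, SKIP == 2, N == 1 and a single
   new argument X, the rebuilt call gets the argument vector
   { X, ARGS[2], ARGS[3] }, i.e. the new arguments come first, followed
   by the old ones that were not skipped.  */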
8380
8381 static tree
8382 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
8383 int skip, tree fndecl, int n, va_list newargs)
8384 {
8385 int nargs = oldnargs - skip + n;
8386 tree *buffer;
8387
8388 if (n > 0)
8389 {
8390 int i, j;
8391
8392 buffer = XALLOCAVEC (tree, nargs);
8393 for (i = 0; i < n; i++)
8394 buffer[i] = va_arg (newargs, tree);
8395 for (j = skip; j < oldnargs; j++, i++)
8396 buffer[i] = args[j];
8397 }
8398 else
8399 buffer = args + skip;
8400
8401 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
8402 }
8403
8404 /* Return true if FNDECL shouldn't be folded right now.
8405 If a built-in function has an inline attribute always_inline
8406 wrapper, defer folding it until after always_inline functions have
8407 been inlined; otherwise e.g. -D_FORTIFY_SOURCE checking
8408 might not be performed. */
8409
8410 bool
8411 avoid_folding_inline_builtin (tree fndecl)
8412 {
8413 return (DECL_DECLARED_INLINE_P (fndecl)
8414 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
8415 && cfun
8416 && !cfun->always_inline_functions_inlined
8417 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
8418 }
8419
8420 /* A wrapper function for builtin folding that prevents warnings for
8421 "statement without effect" and the like, caused by removing the
8422 call node earlier than the warning is generated. */
8423
8424 tree
8425 fold_call_expr (location_t loc, tree exp, bool ignore)
8426 {
8427 tree ret = NULL_TREE;
8428 tree fndecl = get_callee_fndecl (exp);
8429 if (fndecl
8430 && TREE_CODE (fndecl) == FUNCTION_DECL
8431 && DECL_BUILT_IN (fndecl)
8432 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
8433 yet. Defer folding until we see all the arguments
8434 (after inlining). */
8435 && !CALL_EXPR_VA_ARG_PACK (exp))
8436 {
8437 int nargs = call_expr_nargs (exp);
8438
8439 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
8440 instead last argument is __builtin_va_arg_pack (). Defer folding
8441 even in that case, until arguments are finalized. */
8442 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
8443 {
8444 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
8445 if (fndecl2
8446 && TREE_CODE (fndecl2) == FUNCTION_DECL
8447 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
8448 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
8449 return NULL_TREE;
8450 }
8451
8452 if (avoid_folding_inline_builtin (fndecl))
8453 return NULL_TREE;
8454
8455 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
8456 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
8457 CALL_EXPR_ARGP (exp), ignore);
8458 else
8459 {
8460 tree *args = CALL_EXPR_ARGP (exp);
8461 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
8462 if (ret)
8463 return ret;
8464 }
8465 }
8466 return NULL_TREE;
8467 }
8468
8469 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
8470 N arguments are passed in the array ARGARRAY. Return a folded
8471 expression or NULL_TREE if no simplification was possible. */
8472
8473 tree
8474 fold_builtin_call_array (location_t loc, tree,
8475 tree fn,
8476 int n,
8477 tree *argarray)
8478 {
8479 if (TREE_CODE (fn) != ADDR_EXPR)
8480 return NULL_TREE;
8481
8482 tree fndecl = TREE_OPERAND (fn, 0);
8483 if (TREE_CODE (fndecl) == FUNCTION_DECL
8484 && DECL_BUILT_IN (fndecl))
8485 {
8486 /* If last argument is __builtin_va_arg_pack (), arguments to this
8487 function are not finalized yet. Defer folding until they are. */
8488 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
8489 {
8490 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
8491 if (fndecl2
8492 && TREE_CODE (fndecl2) == FUNCTION_DECL
8493 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
8494 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
8495 return NULL_TREE;
8496 }
8497 if (avoid_folding_inline_builtin (fndecl))
8498 return NULL_TREE;
8499 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
8500 return targetm.fold_builtin (fndecl, n, argarray, false);
8501 else
8502 return fold_builtin_n (loc, fndecl, argarray, n, false);
8503 }
8504
8505 return NULL_TREE;
8506 }
8507
8508 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
8509 along with N new arguments specified as the "..." parameters. SKIP
8510 is the number of arguments in EXP to be omitted. This function is used
8511 to do varargs-to-varargs transformations. */
8512
8513 static tree
8514 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
8515 {
8516 va_list ap;
8517 tree t;
8518
8519 va_start (ap, n);
8520 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
8521 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
8522 va_end (ap);
8523
8524 return t;
8525 }
8526
8527 /* Validate a single argument ARG against a tree code CODE representing
8528 a type. */
8529
8530 static bool
8531 validate_arg (const_tree arg, enum tree_code code)
8532 {
8533 if (!arg)
8534 return false;
8535 else if (code == POINTER_TYPE)
8536 return POINTER_TYPE_P (TREE_TYPE (arg));
8537 else if (code == INTEGER_TYPE)
8538 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
8539 return code == TREE_CODE (TREE_TYPE (arg));
8540 }
8541
8542 /* This function validates the types of a function call argument list
8543 against a specified list of tree_codes. If the last specifier is a 0,
8544 that represents an ellipsis; otherwise the last specifier must be a
8545 VOID_TYPE.
8546
8547 This is the GIMPLE version of validate_arglist. Eventually we want to
8548 completely convert builtins.c to work from GIMPLEs and the tree based
8549 validate_arglist will then be removed. */
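/* Usage sketch (editorial addition, not from the original sources):
     validate_gimple_arglist (call, REAL_TYPE, POINTER_TYPE, VOID_TYPE)
   accepts exactly two arguments, a floating-point value and a pointer,
   whereas ending the list with 0 instead of VOID_TYPE would allow any
   number of further arguments after those checked.  */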
8550
8551 bool
8552 validate_gimple_arglist (const gcall *call, ...)
8553 {
8554 enum tree_code code;
8555 bool res = false;
8556 va_list ap;
8557 const_tree arg;
8558 size_t i;
8559
8560 va_start (ap, call);
8561 i = 0;
8562
8563 do
8564 {
8565 code = (enum tree_code) va_arg (ap, int);
8566 switch (code)
8567 {
8568 case 0:
8569 /* This signifies an ellipsis; any further arguments are OK. */
8570 res = true;
8571 goto end;
8572 case VOID_TYPE:
8573 /* This signifies an endlink; if no arguments remain, return
8574 true, otherwise return false. */
8575 res = (i == gimple_call_num_args (call));
8576 goto end;
8577 default:
8578 /* If no parameters remain or the parameter's code does not
8579 match the specified code, return false. Otherwise continue
8580 checking any remaining arguments. */
8581 arg = gimple_call_arg (call, i++);
8582 if (!validate_arg (arg, code))
8583 goto end;
8584 break;
8585 }
8586 }
8587 while (1);
8588
8589 /* We need gotos here since we can only have one VA_CLOSE in a
8590 function. */
8591 end: ;
8592 va_end (ap);
8593
8594 return res;
8595 }
8596
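/* Informal usage sketch (hypothetical caller): a folder for a builtin
   taking two pointers, such as strstr, could check its call with

     if (!validate_gimple_arglist (call, POINTER_TYPE, POINTER_TYPE,
                                   VOID_TYPE))
       return NULL_TREE;

   Passing a trailing 0 instead of VOID_TYPE accepts any number of
   further arguments (the ellipsis case described above).  */
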
8597 /* Default target-specific builtin expander that does nothing. */
8598
8599 rtx
8600 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
8601 rtx target ATTRIBUTE_UNUSED,
8602 rtx subtarget ATTRIBUTE_UNUSED,
8603 machine_mode mode ATTRIBUTE_UNUSED,
8604 int ignore ATTRIBUTE_UNUSED)
8605 {
8606 return NULL_RTX;
8607 }
8608
8609 /* Returns true if EXP represents data that would potentially reside
8610 in a readonly section. */
8611
8612 bool
8613 readonly_data_expr (tree exp)
8614 {
8615 STRIP_NOPS (exp);
8616
8617 if (TREE_CODE (exp) != ADDR_EXPR)
8618 return false;
8619
8620 exp = get_base_address (TREE_OPERAND (exp, 0));
8621 if (!exp)
8622 return false;
8623
8624 /* Make sure we call decl_readonly_section only for trees it
8625 can handle (since it returns true for everything it doesn't
8626 understand). */
8627 if (TREE_CODE (exp) == STRING_CST
8628 || TREE_CODE (exp) == CONSTRUCTOR
8629 || (VAR_P (exp) && TREE_STATIC (exp)))
8630 return decl_readonly_section (exp, 0);
8631 else
8632 return false;
8633 }
8634
8635 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
8636 to the call, and TYPE is its return type.
8637
8638 Return NULL_TREE if no simplification was possible, otherwise return the
8639 simplified form of the call as a tree.
8640
8641 The simplified form may be a constant or other expression which
8642 computes the same value, but in a more efficient manner (including
8643 calls to other builtin functions).
8644
8645 The call may contain arguments which need to be evaluated, but
8646 which are not useful to determine the result of the call. In
8647 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8648 COMPOUND_EXPR will be an argument which must be evaluated.
8649 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8650 COMPOUND_EXPR in the chain will contain the tree for the simplified
8651 form of the builtin function call. */
8652
8653 static tree
8654 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
8655 {
8656 if (!validate_arg (s1, POINTER_TYPE)
8657 || !validate_arg (s2, POINTER_TYPE))
8658 return NULL_TREE;
8659 else
8660 {
8661 tree fn;
8662 const char *p1, *p2;
8663
8664 p2 = c_getstr (s2);
8665 if (p2 == NULL)
8666 return NULL_TREE;
8667
8668 p1 = c_getstr (s1);
8669 if (p1 != NULL)
8670 {
8671 const char *r = strstr (p1, p2);
8672 tree tem;
8673
8674 if (r == NULL)
8675 return build_int_cst (TREE_TYPE (s1), 0);
8676
8677 /* Return an offset into the constant string argument. */
8678 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
8679 return fold_convert_loc (loc, type, tem);
8680 }
8681
8682 /* The argument is const char *, and the result is char *, so we need
8683 a type conversion here to avoid a warning. */
8684 if (p2[0] == '\0')
8685 return fold_convert_loc (loc, type, s1);
8686
8687 if (p2[1] != '\0')
8688 return NULL_TREE;
8689
8690 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
8691 if (!fn)
8692 return NULL_TREE;
8693
8694 /* New argument list transforming strstr(s1, s2) to
8695 strchr(s1, s2[0]). */
8696 return build_call_expr_loc (loc, fn, 2, s1,
8697 build_int_cst (integer_type_node, p2[0]));
8698 }
8699 }
8700
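/* Informal summary of the strstr folds above (user-level view):

     strstr (s, "")          becomes  (char *) s
     strstr (s, "a")         becomes  strchr (s, 'a')
     strstr ("hello", "ll")  becomes  "hello" + 2
     strstr ("hello", "z")   becomes  a null pointer

   A non-constant needle, or a multi-character needle with a
   non-constant haystack, is left for the library call.  */
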
8701 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
8702 to the call, and TYPE is its return type.
8703
8704 Return NULL_TREE if no simplification was possible, otherwise return the
8705 simplified form of the call as a tree.
8706
8707 The simplified form may be a constant or other expression which
8708 computes the same value, but in a more efficient manner (including
8709 calls to other builtin functions).
8710
8711 The call may contain arguments which need to be evaluated, but
8712 which are not useful to determine the result of the call. In
8713 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8714 COMPOUND_EXPR will be an argument which must be evaluated.
8715 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8716 COMPOUND_EXPR in the chain will contain the tree for the simplified
8717 form of the builtin function call. */
8718
8719 static tree
8720 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
8721 {
8722 if (!validate_arg (s1, POINTER_TYPE)
8723 || !validate_arg (s2, POINTER_TYPE))
8724 return NULL_TREE;
8725 else
8726 {
8727 tree fn;
8728 const char *p1, *p2;
8729
8730 p2 = c_getstr (s2);
8731 if (p2 == NULL)
8732 return NULL_TREE;
8733
8734 p1 = c_getstr (s1);
8735 if (p1 != NULL)
8736 {
8737 const char *r = strpbrk (p1, p2);
8738 tree tem;
8739
8740 if (r == NULL)
8741 return build_int_cst (TREE_TYPE (s1), 0);
8742
8743 /* Return an offset into the constant string argument. */
8744 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
8745 return fold_convert_loc (loc, type, tem);
8746 }
8747
8748 if (p2[0] == '\0')
8749 /* strpbrk(x, "") == NULL.
8750 Evaluate and ignore s1 in case it had side-effects. */
8751 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
8752
8753 if (p2[1] != '\0')
8754 return NULL_TREE; /* Really call strpbrk. */
8755
8756 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
8757 if (!fn)
8758 return NULL_TREE;
8759
8760 /* New argument list transforming strpbrk(s1, s2) to
8761 strchr(s1, s2[0]). */
8762 return build_call_expr_loc (loc, fn, 2, s1,
8763 build_int_cst (integer_type_node, p2[0]));
8764 }
8765 }
8766
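/* Informal summary of the strpbrk folds above:

     strpbrk (s, "")          becomes  a null pointer (s still evaluated)
     strpbrk (s, "a")         becomes  strchr (s, 'a')
     strpbrk ("hello", "lo")  becomes  "hello" + 2

   Other cases are left for the library call.  */
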
8767 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
8768 to the call.
8769
8770 Return NULL_TREE if no simplification was possible, otherwise return the
8771 simplified form of the call as a tree.
8772
8773 The simplified form may be a constant or other expression which
8774 computes the same value, but in a more efficient manner (including
8775 calls to other builtin functions).
8776
8777 The call may contain arguments which need to be evaluated, but
8778 which are not useful to determine the result of the call. In
8779 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8780 COMPOUND_EXPR will be an argument which must be evaluated.
8781 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8782 COMPOUND_EXPR in the chain will contain the tree for the simplified
8783 form of the builtin function call. */
8784
8785 static tree
8786 fold_builtin_strspn (location_t loc, tree s1, tree s2)
8787 {
8788 if (!validate_arg (s1, POINTER_TYPE)
8789 || !validate_arg (s2, POINTER_TYPE))
8790 return NULL_TREE;
8791 else
8792 {
8793 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
8794
8795 /* If either argument is "", the result is zero. */
8796 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
8797 /* Evaluate and ignore both arguments in case either one has
8798 side-effects. */
8799 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
8800 s1, s2);
8801 return NULL_TREE;
8802 }
8803 }
8804
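/* Informal summary: the only strspn fold above is the empty-string
   case, e.g.

     strspn (s, "")  becomes  0
     strspn ("", s)  becomes  0

   with both arguments still evaluated for their side effects.  */
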
8805 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
8806 to the call.
8807
8808 Return NULL_TREE if no simplification was possible, otherwise return the
8809 simplified form of the call as a tree.
8810
8811 The simplified form may be a constant or other expression which
8812 computes the same value, but in a more efficient manner (including
8813 calls to other builtin functions).
8814
8815 The call may contain arguments which need to be evaluated, but
8816 which are not useful to determine the result of the call. In
8817 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8818 COMPOUND_EXPR will be an argument which must be evaluated.
8819 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8820 COMPOUND_EXPR in the chain will contain the tree for the simplified
8821 form of the builtin function call. */
8822
8823 static tree
8824 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
8825 {
8826 if (!validate_arg (s1, POINTER_TYPE)
8827 || !validate_arg (s2, POINTER_TYPE))
8828 return NULL_TREE;
8829 else
8830 {
8831 /* If the first argument is "", the result is zero. */
8832 const char *p1 = c_getstr (s1);
8833 if (p1 && *p1 == '\0')
8834 {
8835 /* Evaluate and ignore argument s2 in case it has
8836 side-effects. */
8837 return omit_one_operand_loc (loc, size_type_node,
8838 size_zero_node, s2);
8839 }
8840
8841 /* If the second argument is "", return __builtin_strlen(s1). */
8842 const char *p2 = c_getstr (s2);
8843 if (p2 && *p2 == '\0')
8844 {
8845 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
8846
8847 /* If the replacement _DECL isn't initialized, don't do the
8848 transformation. */
8849 if (!fn)
8850 return NULL_TREE;
8851
8852 return build_call_expr_loc (loc, fn, 1, s1);
8853 }
8854 return NULL_TREE;
8855 }
8856 }
8857
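/* Informal summary of the strcspn folds above:

     strcspn ("", s)  becomes  0 (s still evaluated)
     strcspn (s, "")  becomes  strlen (s)

   Other cases are left for the library call.  */
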
8858 /* Fold the next_arg or va_start call EXP. Returns true if an error was
8859 produced, false otherwise. This is done so that we don't output the error
8860 or warning more than once. */
8861
8862 bool
8863 fold_builtin_next_arg (tree exp, bool va_start_p)
8864 {
8865 tree fntype = TREE_TYPE (current_function_decl);
8866 int nargs = call_expr_nargs (exp);
8867 tree arg;
8868 /* There is a good chance the current input_location points inside the
8869 definition of the va_start macro (perhaps on the token for the
8870 builtin) in a system header, so warnings will not be emitted.
8871 Use the location in real source code. */
8872 source_location current_location =
8873 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
8874 NULL);
8875
8876 if (!stdarg_p (fntype))
8877 {
8878 error ("%<va_start%> used in function with fixed args");
8879 return true;
8880 }
8881
8882 if (va_start_p)
8883 {
8884 if (nargs != 2)
8885 {
8886 error ("wrong number of arguments to function %<va_start%>");
8887 return true;
8888 }
8889 arg = CALL_EXPR_ARG (exp, 1);
8890 }
8891 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
8892 when we checked the arguments and if needed issued a warning. */
8893 else
8894 {
8895 if (nargs == 0)
8896 {
8897 /* Evidently an out of date version of <stdarg.h>; can't validate
8898 va_start's second argument, but can still work as intended. */
8899 warning_at (current_location,
8900 OPT_Wvarargs,
8901 "%<__builtin_next_arg%> called without an argument");
8902 return true;
8903 }
8904 else if (nargs > 1)
8905 {
8906 error ("wrong number of arguments to function %<__builtin_next_arg%>");
8907 return true;
8908 }
8909 arg = CALL_EXPR_ARG (exp, 0);
8910 }
8911
8912 if (TREE_CODE (arg) == SSA_NAME)
8913 arg = SSA_NAME_VAR (arg);
8914
8915 /* We destructively modify the call to be __builtin_va_start (ap, 0)
8916 or __builtin_next_arg (0) the first time we see it, after checking
8917 the arguments and if needed issuing a warning. */
8918 if (!integer_zerop (arg))
8919 {
8920 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
8921
8922 /* Strip off all nops for the sake of the comparison. This
8923 is not quite the same as STRIP_NOPS. It does more.
8924 We must also strip off INDIRECT_EXPR for C++ reference
8925 parameters. */
8926 while (CONVERT_EXPR_P (arg)
8927 || TREE_CODE (arg) == INDIRECT_REF)
8928 arg = TREE_OPERAND (arg, 0);
8929 if (arg != last_parm)
8930 {
8931 /* FIXME: Sometimes with the tree optimizers we can get something
8932 other than the last argument even though the user used the last
8933 argument. We just warn and set the arg to be the last
8934 argument so that we will get wrong-code because of
8935 it. */
8936 warning_at (current_location,
8937 OPT_Wvarargs,
8938 "second parameter of %<va_start%> not last named argument");
8939 }
8940
8941 /* Undefined by C99 7.15.1.4p4 (va_start):
8942 "If the parameter parmN is declared with the register storage
8943 class, with a function or array type, or with a type that is
8944 not compatible with the type that results after application of
8945 the default argument promotions, the behavior is undefined."
8946 */
8947 else if (DECL_REGISTER (arg))
8948 {
8949 warning_at (current_location,
8950 OPT_Wvarargs,
8951 "undefined behavior when second parameter of "
8952 "%<va_start%> is declared with %<register%> storage");
8953 }
8954
8955 /* We want to verify the second parameter just once before the tree
8956 optimizers are run and then avoid keeping it in the tree,
8957 as otherwise we could warn even for correct code like:
8958 void foo (int i, ...)
8959 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
8960 if (va_start_p)
8961 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
8962 else
8963 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
8964 }
8965 return false;
8966 }
8967
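/* Informal examples of the diagnostics above (hypothetical user code):

     void f (int a, int b, ...)
     {
       va_list ap;
       va_start (ap, a);  // warning: second parameter of va_start is
                          //          not the last named argument
       va_end (ap);
     }

     void g (int a)       // not a stdarg function
     {
       va_list ap;
       va_start (ap, a);  // error: va_start used in function with
                          //        fixed args
     }

   A correct va_start (ap, b) in f is accepted and, as described above,
   later rewritten to __builtin_va_start (ap, 0).  */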
8968
8969 /* Expand a call EXP to __builtin_object_size. */
8970
8971 static rtx
8972 expand_builtin_object_size (tree exp)
8973 {
8974 tree ost;
8975 int object_size_type;
8976 tree fndecl = get_callee_fndecl (exp);
8977
8978 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
8979 {
8980 error ("%Kfirst argument of %D must be a pointer, second integer constant",
8981 exp, fndecl);
8982 expand_builtin_trap ();
8983 return const0_rtx;
8984 }
8985
8986 ost = CALL_EXPR_ARG (exp, 1);
8987 STRIP_NOPS (ost);
8988
8989 if (TREE_CODE (ost) != INTEGER_CST
8990 || tree_int_cst_sgn (ost) < 0
8991 || compare_tree_int (ost, 3) > 0)
8992 {
8993 error ("%Klast argument of %D is not integer constant between 0 and 3",
8994 exp, fndecl);
8995 expand_builtin_trap ();
8996 return const0_rtx;
8997 }
8998
8999 object_size_type = tree_to_shwi (ost);
9000
9001 return object_size_type < 2 ? constm1_rtx : const0_rtx;
9002 }
9003
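/* Informal illustration (hypothetical user code): when earlier passes
   could not determine the object size, this fallback yields the
   "unknown" value for each type:

     char buf[8];
     __builtin_object_size (buf, 0)  is  8 (normally folded earlier)
     __builtin_object_size (p, 0)    is  (size_t) -1 when p is unknown
     __builtin_object_size (p, 2)    is  (size_t) 0  when p is unknown

   Types 0 and 1 report a maximum number of remaining bytes, types 2
   and 3 a minimum.  */
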
9004 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
9005 FCODE is the BUILT_IN_* to use.
9006 Return NULL_RTX if we failed; the caller should emit a normal call,
9007 otherwise try to get the result in TARGET, if convenient (and in
9008 mode MODE if that's convenient). */
9009
9010 static rtx
9011 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
9012 enum built_in_function fcode)
9013 {
9014 tree dest, src, len, size;
9015
9016 if (!validate_arglist (exp,
9017 POINTER_TYPE,
9018 fcode == BUILT_IN_MEMSET_CHK
9019 ? INTEGER_TYPE : POINTER_TYPE,
9020 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
9021 return NULL_RTX;
9022
9023 dest = CALL_EXPR_ARG (exp, 0);
9024 src = CALL_EXPR_ARG (exp, 1);
9025 len = CALL_EXPR_ARG (exp, 2);
9026 size = CALL_EXPR_ARG (exp, 3);
9027
9028 if (! tree_fits_uhwi_p (size))
9029 return NULL_RTX;
9030
9031 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
9032 {
9033 tree fn;
9034
9035 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
9036 {
9037 warning_at (tree_nonartificial_location (exp),
9038 0, "%Kcall to %D will always overflow destination buffer",
9039 exp, get_callee_fndecl (exp));
9040 return NULL_RTX;
9041 }
9042
9043 fn = NULL_TREE;
9044 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
9045 mem{cpy,pcpy,move,set} is available. */
9046 switch (fcode)
9047 {
9048 case BUILT_IN_MEMCPY_CHK:
9049 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
9050 break;
9051 case BUILT_IN_MEMPCPY_CHK:
9052 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
9053 break;
9054 case BUILT_IN_MEMMOVE_CHK:
9055 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
9056 break;
9057 case BUILT_IN_MEMSET_CHK:
9058 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
9059 break;
9060 default:
9061 break;
9062 }
9063
9064 if (! fn)
9065 return NULL_RTX;
9066
9067 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
9068 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9069 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
9070 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9071 }
9072 else if (fcode == BUILT_IN_MEMSET_CHK)
9073 return NULL_RTX;
9074 else
9075 {
9076 unsigned int dest_align = get_pointer_alignment (dest);
9077
9078 /* If DEST is not a pointer type, call the normal function. */
9079 if (dest_align == 0)
9080 return NULL_RTX;
9081
9082 /* If SRC and DEST are the same (and not volatile), do nothing. */
9083 if (operand_equal_p (src, dest, 0))
9084 {
9085 tree expr;
9086
9087 if (fcode != BUILT_IN_MEMPCPY_CHK)
9088 {
9089 /* Evaluate and ignore LEN in case it has side-effects. */
9090 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
9091 return expand_expr (dest, target, mode, EXPAND_NORMAL);
9092 }
9093
9094 expr = fold_build_pointer_plus (dest, len);
9095 return expand_expr (expr, target, mode, EXPAND_NORMAL);
9096 }
9097
9098 /* __memmove_chk special case. */
9099 if (fcode == BUILT_IN_MEMMOVE_CHK)
9100 {
9101 unsigned int src_align = get_pointer_alignment (src);
9102
9103 if (src_align == 0)
9104 return NULL_RTX;
9105
9106 /* If src is categorized for a readonly section we can use
9107 normal __memcpy_chk. */
9108 if (readonly_data_expr (src))
9109 {
9110 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
9111 if (!fn)
9112 return NULL_RTX;
9113 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
9114 dest, src, len, size);
9115 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9116 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
9117 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9118 }
9119 }
9120 return NULL_RTX;
9121 }
9122 }
9123
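/* Informal illustration of the cases handled above (hypothetical user
   code, typically produced by _FORTIFY_SOURCE wrappers):

     __builtin___memcpy_chk (d, s, n, (size_t) -1)
       expands to memcpy (d, s, n), since an unknown object size means
       no check is possible;
     __builtin___memcpy_chk (d, s, 10, 4)
       draws the "will always overflow destination buffer" warning and
       is left as a checked call.

   If neither the length nor the size allows a decision at compile
   time, NULL_RTX is returned and the caller emits the normal checked
   library call.  */
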
9124 /* Emit warning if a buffer overflow is detected at compile time. */
9125
9126 static void
9127 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
9128 {
9129 int is_strlen = 0;
9130 tree len, size;
9131 location_t loc = tree_nonartificial_location (exp);
9132
9133 switch (fcode)
9134 {
9135 case BUILT_IN_STRCPY_CHK:
9136 case BUILT_IN_STPCPY_CHK:
9137 /* For __strcat_chk the warning will be emitted only if overflowing
9138 by at least strlen (dest) + 1 bytes. */
9139 case BUILT_IN_STRCAT_CHK:
9140 len = CALL_EXPR_ARG (exp, 1);
9141 size = CALL_EXPR_ARG (exp, 2);
9142 is_strlen = 1;
9143 break;
9144 case BUILT_IN_STRNCAT_CHK:
9145 case BUILT_IN_STRNCPY_CHK:
9146 case BUILT_IN_STPNCPY_CHK:
9147 len = CALL_EXPR_ARG (exp, 2);
9148 size = CALL_EXPR_ARG (exp, 3);
9149 break;
9150 case BUILT_IN_SNPRINTF_CHK:
9151 case BUILT_IN_VSNPRINTF_CHK:
9152 len = CALL_EXPR_ARG (exp, 1);
9153 size = CALL_EXPR_ARG (exp, 3);
9154 break;
9155 default:
9156 gcc_unreachable ();
9157 }
9158
9159 if (!len || !size)
9160 return;
9161
9162 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
9163 return;
9164
9165 if (is_strlen)
9166 {
9167 len = c_strlen (len, 1);
9168 if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
9169 return;
9170 }
9171 else if (fcode == BUILT_IN_STRNCAT_CHK)
9172 {
9173 tree src = CALL_EXPR_ARG (exp, 1);
9174 if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
9175 return;
9176 src = c_strlen (src, 1);
9177 if (! src || ! tree_fits_uhwi_p (src))
9178 {
9179 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
9180 exp, get_callee_fndecl (exp));
9181 return;
9182 }
9183 else if (tree_int_cst_lt (src, size))
9184 return;
9185 }
9186 else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
9187 return;
9188
9189 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
9190 exp, get_callee_fndecl (exp));
9191 }
9192
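/* Informal example of a call that triggers this warning (hypothetical
   user code, typically reached through _FORTIFY_SOURCE):

     char buf[4];
     __builtin___strcpy_chk (buf, "too long", __builtin_object_size (buf, 0));

   The known source length (8) is not smaller than the destination size
   (4), so "will always overflow destination buffer" is reported.  */
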
9193 /* Emit warning if a buffer overflow is detected at compile time
9194 in __sprintf_chk/__vsprintf_chk calls. */
9195
9196 static void
9197 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
9198 {
9199 tree size, len, fmt;
9200 const char *fmt_str;
9201 int nargs = call_expr_nargs (exp);
9202
9203 /* Verify the required arguments in the original call. */
9204
9205 if (nargs < 4)
9206 return;
9207 size = CALL_EXPR_ARG (exp, 2);
9208 fmt = CALL_EXPR_ARG (exp, 3);
9209
9210 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
9211 return;
9212
9213 /* Check whether the format is a literal string constant. */
9214 fmt_str = c_getstr (fmt);
9215 if (fmt_str == NULL)
9216 return;
9217
9218 if (!init_target_chars ())
9219 return;
9220
9221 /* If the format doesn't contain % args or %%, we know its size. */
9222 if (strchr (fmt_str, target_percent) == 0)
9223 len = build_int_cstu (size_type_node, strlen (fmt_str));
9224 /* If the format is "%s" and first ... argument is a string literal,
9225 we know it too. */
9226 else if (fcode == BUILT_IN_SPRINTF_CHK
9227 && strcmp (fmt_str, target_percent_s) == 0)
9228 {
9229 tree arg;
9230
9231 if (nargs < 5)
9232 return;
9233 arg = CALL_EXPR_ARG (exp, 4);
9234 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
9235 return;
9236
9237 len = c_strlen (arg, 1);
9238 if (!len || ! tree_fits_uhwi_p (len))
9239 return;
9240 }
9241 else
9242 return;
9243
9244 if (! tree_int_cst_lt (len, size))
9245 warning_at (tree_nonartificial_location (exp),
9246 0, "%Kcall to %D will always overflow destination buffer",
9247 exp, get_callee_fndecl (exp));
9248 }
9249
9250 /* Emit warning if a free is called with address of a variable. */
9251
9252 static void
9253 maybe_emit_free_warning (tree exp)
9254 {
9255 tree arg = CALL_EXPR_ARG (exp, 0);
9256
9257 STRIP_NOPS (arg);
9258 if (TREE_CODE (arg) != ADDR_EXPR)
9259 return;
9260
9261 arg = get_base_address (TREE_OPERAND (arg, 0));
9262 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
9263 return;
9264
9265 if (SSA_VAR_P (arg))
9266 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
9267 "%Kattempt to free a non-heap object %qD", exp, arg);
9268 else
9269 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
9270 "%Kattempt to free a non-heap object", exp);
9271 }
9272
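/* Informal example (hypothetical user code):

     int x;
     free (&x);   // warning: attempt to free a non-heap object 'x'

   The diagnostic is controlled by -Wfree-nonheap-object.  */
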
9273 /* Fold a call to __builtin_object_size with arguments PTR and OST,
9274 if possible. */
9275
9276 static tree
9277 fold_builtin_object_size (tree ptr, tree ost)
9278 {
9279 unsigned HOST_WIDE_INT bytes;
9280 int object_size_type;
9281
9282 if (!validate_arg (ptr, POINTER_TYPE)
9283 || !validate_arg (ost, INTEGER_TYPE))
9284 return NULL_TREE;
9285
9286 STRIP_NOPS (ost);
9287
9288 if (TREE_CODE (ost) != INTEGER_CST
9289 || tree_int_cst_sgn (ost) < 0
9290 || compare_tree_int (ost, 3) > 0)
9291 return NULL_TREE;
9292
9293 object_size_type = tree_to_shwi (ost);
9294
9295 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
9296 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
9297 and (size_t) 0 for types 2 and 3. */
9298 if (TREE_SIDE_EFFECTS (ptr))
9299 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
9300
9301 if (TREE_CODE (ptr) == ADDR_EXPR)
9302 {
9303 compute_builtin_object_size (ptr, object_size_type, &bytes);
9304 if (wi::fits_to_tree_p (bytes, size_type_node))
9305 return build_int_cstu (size_type_node, bytes);
9306 }
9307 else if (TREE_CODE (ptr) == SSA_NAME)
9308 {
9309 /* If object size is not known yet, delay folding until
9310 later. Maybe subsequent passes will help determining
9311 it. */
9312 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
9313 && wi::fits_to_tree_p (bytes, size_type_node))
9314 return build_int_cstu (size_type_node, bytes);
9315 }
9316
9317 return NULL_TREE;
9318 }
9319
9320 /* Builtins with folding operations that operate on "..." arguments
9321 need special handling; we need to store the arguments in a convenient
9322 data structure before attempting any folding. Fortunately there are
9323 only a few builtins that fall into this category. FNDECL is the
9324 function, EXP is the CALL_EXPR for the call. */
9325
9326 static tree
9327 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
9328 {
9329 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9330 tree ret = NULL_TREE;
9331
9332 switch (fcode)
9333 {
9334 case BUILT_IN_FPCLASSIFY:
9335 ret = fold_builtin_fpclassify (loc, args, nargs);
9336 break;
9337
9338 default:
9339 break;
9340 }
9341 if (ret)
9342 {
9343 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9344 SET_EXPR_LOCATION (ret, loc);
9345 TREE_NO_WARNING (ret) = 1;
9346 return ret;
9347 }
9348 return NULL_TREE;
9349 }
9350
9351 /* Initialize format string characters in the target charset. */
9352
9353 bool
9354 init_target_chars (void)
9355 {
9356 static bool init;
9357 if (!init)
9358 {
9359 target_newline = lang_hooks.to_target_charset ('\n');
9360 target_percent = lang_hooks.to_target_charset ('%');
9361 target_c = lang_hooks.to_target_charset ('c');
9362 target_s = lang_hooks.to_target_charset ('s');
9363 if (target_newline == 0 || target_percent == 0 || target_c == 0
9364 || target_s == 0)
9365 return false;
9366
9367 target_percent_c[0] = target_percent;
9368 target_percent_c[1] = target_c;
9369 target_percent_c[2] = '\0';
9370
9371 target_percent_s[0] = target_percent;
9372 target_percent_s[1] = target_s;
9373 target_percent_s[2] = '\0';
9374
9375 target_percent_s_newline[0] = target_percent;
9376 target_percent_s_newline[1] = target_s;
9377 target_percent_s_newline[2] = target_newline;
9378 target_percent_s_newline[3] = '\0';
9379
9380 init = true;
9381 }
9382 return true;
9383 }
9384
9385 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
9386 and no overflow/underflow occurred. INEXACT is true if M was not
9387 exactly calculated. TYPE is the tree type for the result. This
9388 function assumes that you cleared the MPFR flags and then
9389 calculated M to see if anything subsequently set a flag prior to
9390 entering this function. Return NULL_TREE if any checks fail. */
9391
9392 static tree
9393 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
9394 {
9395 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
9396 overflow/underflow occurred. If -frounding-math, proceed iff the
9397 result of calling FUNC was exact. */
9398 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
9399 && (!flag_rounding_math || !inexact))
9400 {
9401 REAL_VALUE_TYPE rr;
9402
9403 real_from_mpfr (&rr, m, type, GMP_RNDN);
9404 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
9405 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
9406 but the mpfr_t is not, then we underflowed in the
9407 conversion. */
9408 if (real_isfinite (&rr)
9409 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
9410 {
9411 REAL_VALUE_TYPE rmode;
9412
9413 real_convert (&rmode, TYPE_MODE (type), &rr);
9414 /* Proceed iff the specified mode can hold the value. */
9415 if (real_identical (&rmode, &rr))
9416 return build_real (type, rmode);
9417 }
9418 }
9419 return NULL_TREE;
9420 }
9421
9422 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
9423 number and no overflow/underflow occurred. INEXACT is true if M
9424 was not exactly calculated. TYPE is the tree type for the result.
9425 This function assumes that you cleared the MPFR flags and then
9426 calculated M to see if anything subsequently set a flag prior to
9427 entering this function. Return NULL_TREE if any checks fail; if
9428 FORCE_CONVERT is true, bypass the checks. */
9429
9430 static tree
9431 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
9432 {
9433 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
9434 overflow/underflow occurred. If -frounding-math, proceed iff the
9435 result of calling FUNC was exact. */
9436 if (force_convert
9437 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
9438 && !mpfr_overflow_p () && !mpfr_underflow_p ()
9439 && (!flag_rounding_math || !inexact)))
9440 {
9441 REAL_VALUE_TYPE re, im;
9442
9443 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
9444 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
9445 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
9446 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
9447 but the mpfr_t is not, then we underflowed in the
9448 conversion. */
9449 if (force_convert
9450 || (real_isfinite (&re) && real_isfinite (&im)
9451 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
9452 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
9453 {
9454 REAL_VALUE_TYPE re_mode, im_mode;
9455
9456 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
9457 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
9458 /* Proceed iff the specified mode can hold the value. */
9459 if (force_convert
9460 || (real_identical (&re_mode, &re)
9461 && real_identical (&im_mode, &im)))
9462 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
9463 build_real (TREE_TYPE (type), im_mode));
9464 }
9465 }
9466 return NULL_TREE;
9467 }
9468
9469 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
9470 the pointer *(ARG_QUO) and return the result. The type is taken
9471 from the type of ARG0 and is used for setting the precision of the
9472 calculation and results. */
9473
9474 static tree
9475 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
9476 {
9477 tree const type = TREE_TYPE (arg0);
9478 tree result = NULL_TREE;
9479
9480 STRIP_NOPS (arg0);
9481 STRIP_NOPS (arg1);
9482
9483 /* To proceed, MPFR must exactly represent the target floating point
9484 format, which only happens when the target base equals two. */
9485 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
9486 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9487 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
9488 {
9489 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
9490 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
9491
9492 if (real_isfinite (ra0) && real_isfinite (ra1))
9493 {
9494 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
9495 const int prec = fmt->p;
9496 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
9497 tree result_rem;
9498 long integer_quo;
9499 mpfr_t m0, m1;
9500
9501 mpfr_inits2 (prec, m0, m1, NULL);
9502 mpfr_from_real (m0, ra0, GMP_RNDN);
9503 mpfr_from_real (m1, ra1, GMP_RNDN);
9504 mpfr_clear_flags ();
9505 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
9506 /* Remquo is independent of the rounding mode, so pass
9507 inexact=0 to do_mpfr_ckconv(). */
9508 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
9509 mpfr_clears (m0, m1, NULL);
9510 if (result_rem)
9511 {
9512 /* MPFR calculates quo in the host's long so it may
9513 return more bits in quo than the target int can hold
9514 if sizeof(host long) > sizeof(target int). This can
9515 happen even for native compilers in LP64 mode. In
9516 these cases, modulo the quo value with the largest
9517 number that the target int can hold while leaving one
9518 bit for the sign. */
9519 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
9520 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
9521
9522 /* Dereference the quo pointer argument. */
9523 arg_quo = build_fold_indirect_ref (arg_quo);
9524 /* Proceed iff a valid pointer type was passed in. */
9525 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
9526 {
9527 /* Set the value. */
9528 tree result_quo
9529 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
9530 build_int_cst (TREE_TYPE (arg_quo),
9531 integer_quo));
9532 TREE_SIDE_EFFECTS (result_quo) = 1;
9533 /* Combine the quo assignment with the rem. */
9534 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
9535 result_quo, result_rem));
9536 }
9537 }
9538 }
9539 }
9540 return result;
9541 }
9542
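/* Informal example: for constant arguments such as remquo (5.0, 3.0, &q)
   the fold produces a COMPOUND_EXPR that stores 2 in q and yields the
   remainder -1.0, matching the C99 remquo semantics computed here with
   mpfr_remquo.  */
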
9543 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
9544 resulting value as a tree with type TYPE. The mpfr precision is
9545 set to the precision of TYPE. We assume that this mpfr function
9546 returns zero if the result could be calculated exactly within the
9547 requested precision. In addition, the integer pointer represented
9548 by ARG_SG will be dereferenced and set to the appropriate signgam
9549 (-1,1) value. */
9550
9551 static tree
9552 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
9553 {
9554 tree result = NULL_TREE;
9555
9556 STRIP_NOPS (arg);
9557
9558 /* To proceed, MPFR must exactly represent the target floating point
9559 format, which only happens when the target base equals two. Also
9560 verify ARG is a constant and that ARG_SG is an int pointer. */
9561 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
9562 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
9563 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
9564 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
9565 {
9566 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
9567
9568 /* In addition to NaN and Inf, the argument cannot be zero or a
9569 negative integer. */
9570 if (real_isfinite (ra)
9571 && ra->cl != rvc_zero
9572 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
9573 {
9574 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
9575 const int prec = fmt->p;
9576 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
9577 int inexact, sg;
9578 mpfr_t m;
9579 tree result_lg;
9580
9581 mpfr_init2 (m, prec);
9582 mpfr_from_real (m, ra, GMP_RNDN);
9583 mpfr_clear_flags ();
9584 inexact = mpfr_lgamma (m, &sg, m, rnd);
9585 result_lg = do_mpfr_ckconv (m, type, inexact);
9586 mpfr_clear (m);
9587 if (result_lg)
9588 {
9589 tree result_sg;
9590
9591 /* Dereference the arg_sg pointer argument. */
9592 arg_sg = build_fold_indirect_ref (arg_sg);
9593 /* Assign the signgam value into *arg_sg. */
9594 result_sg = fold_build2 (MODIFY_EXPR,
9595 TREE_TYPE (arg_sg), arg_sg,
9596 build_int_cst (TREE_TYPE (arg_sg), sg));
9597 TREE_SIDE_EFFECTS (result_sg) = 1;
9598 /* Combine the signgam assignment with the lgamma result. */
9599 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
9600 result_sg, result_lg));
9601 }
9602 }
9603 }
9604
9605 return result;
9606 }
9607
9608 /* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
9609 mpc function FUNC on it and return the resulting value as a tree
9610 with type TYPE. The mpfr precision is set to the precision of
9611 TYPE. We assume that function FUNC returns zero if the result
9612 could be calculated exactly within the requested precision. If
9613 DO_NONFINITE is true, then fold expressions containing Inf or NaN
9614 in the arguments and/or results. */
9615
9616 tree
9617 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
9618 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
9619 {
9620 tree result = NULL_TREE;
9621
9622 STRIP_NOPS (arg0);
9623 STRIP_NOPS (arg1);
9624
9625 /* To proceed, MPFR must exactly represent the target floating point
9626 format, which only happens when the target base equals two. */
9627 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
9628 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
9629 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
9630 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
9631 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
9632 {
9633 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
9634 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
9635 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
9636 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
9637
9638 if (do_nonfinite
9639 || (real_isfinite (re0) && real_isfinite (im0)
9640 && real_isfinite (re1) && real_isfinite (im1)))
9641 {
9642 const struct real_format *const fmt =
9643 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
9644 const int prec = fmt->p;
9645 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
9646 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
9647 int inexact;
9648 mpc_t m0, m1;
9649
9650 mpc_init2 (m0, prec);
9651 mpc_init2 (m1, prec);
9652 mpfr_from_real (mpc_realref (m0), re0, rnd);
9653 mpfr_from_real (mpc_imagref (m0), im0, rnd);
9654 mpfr_from_real (mpc_realref (m1), re1, rnd);
9655 mpfr_from_real (mpc_imagref (m1), im1, rnd);
9656 mpfr_clear_flags ();
9657 inexact = func (m0, m0, m1, crnd);
9658 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
9659 mpc_clear (m0);
9660 mpc_clear (m1);
9661 }
9662 }
9663
9664 return result;
9665 }
9666
9667 /* A wrapper function for builtin folding that prevents warnings for
9668 "statement without effect" and the like, caused by removing the
9669 call node earlier than the warning is generated. */
9670
9671 tree
9672 fold_call_stmt (gcall *stmt, bool ignore)
9673 {
9674 tree ret = NULL_TREE;
9675 tree fndecl = gimple_call_fndecl (stmt);
9676 location_t loc = gimple_location (stmt);
9677 if (fndecl
9678 && TREE_CODE (fndecl) == FUNCTION_DECL
9679 && DECL_BUILT_IN (fndecl)
9680 && !gimple_call_va_arg_pack_p (stmt))
9681 {
9682 int nargs = gimple_call_num_args (stmt);
9683 tree *args = (nargs > 0
9684 ? gimple_call_arg_ptr (stmt, 0)
9685 : &error_mark_node);
9686
9687 if (avoid_folding_inline_builtin (fndecl))
9688 return NULL_TREE;
9689 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9690 {
9691 return targetm.fold_builtin (fndecl, nargs, args, ignore);
9692 }
9693 else
9694 {
9695 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
9696 if (ret)
9697 {
9698 /* Propagate location information from original call to
9699 expansion of builtin. Otherwise things like
9700 maybe_emit_chk_warning, that operate on the expansion
9701 of a builtin, will use the wrong location information. */
9702 if (gimple_has_location (stmt))
9703 {
9704 tree realret = ret;
9705 if (TREE_CODE (ret) == NOP_EXPR)
9706 realret = TREE_OPERAND (ret, 0);
9707 if (CAN_HAVE_LOCATION_P (realret)
9708 && !EXPR_HAS_LOCATION (realret))
9709 SET_EXPR_LOCATION (realret, loc);
9710 return realret;
9711 }
9712 return ret;
9713 }
9714 }
9715 }
9716 return NULL_TREE;
9717 }
9718
9719 /* Look up the function in builtin_decl that corresponds to DECL
9720 and set ASMSPEC as its user assembler name. DECL must be a
9721 function decl that declares a builtin. */
9722
9723 void
9724 set_builtin_user_assembler_name (tree decl, const char *asmspec)
9725 {
9726 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
9727 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
9728 && asmspec != 0);
9729
9730 tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
9731 set_user_assembler_name (builtin, asmspec);
9732
9733 if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
9734 && INT_TYPE_SIZE < BITS_PER_WORD)
9735 {
9736 set_user_assembler_libfunc ("ffs", asmspec);
9737 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE, MODE_INT, 0),
9738 "ffs");
9739 }
9740 }
9741
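/* Informal example (hypothetical user code): a declaration such as

     extern int ffs (int) __asm__ ("my_ffs");

   gives the builtin an explicit assembler name; expansions of
   __builtin_ffs then reference "my_ffs", and on targets where int is
   narrower than a word the ffs optab/libfunc entry is redirected to the
   same name as well.  */
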
9742 /* Return true if DECL is a builtin that expands to a constant or similarly
9743 simple code. */
9744 bool
9745 is_simple_builtin (tree decl)
9746 {
9747 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
9748 switch (DECL_FUNCTION_CODE (decl))
9749 {
9750 /* Builtins that expand to constants. */
9751 case BUILT_IN_CONSTANT_P:
9752 case BUILT_IN_EXPECT:
9753 case BUILT_IN_OBJECT_SIZE:
9754 case BUILT_IN_UNREACHABLE:
9755 /* Simple register moves or loads from stack. */
9756 case BUILT_IN_ASSUME_ALIGNED:
9757 case BUILT_IN_RETURN_ADDRESS:
9758 case BUILT_IN_EXTRACT_RETURN_ADDR:
9759 case BUILT_IN_FROB_RETURN_ADDR:
9760 case BUILT_IN_RETURN:
9761 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
9762 case BUILT_IN_FRAME_ADDRESS:
9763 case BUILT_IN_VA_END:
9764 case BUILT_IN_STACK_SAVE:
9765 case BUILT_IN_STACK_RESTORE:
9766 /* Exception state returns or moves registers around. */
9767 case BUILT_IN_EH_FILTER:
9768 case BUILT_IN_EH_POINTER:
9769 case BUILT_IN_EH_COPY_VALUES:
9770 return true;
9771
9772 default:
9773 return false;
9774 }
9775
9776 return false;
9777 }
9778
9779 /* Return true if DECL is a builtin that is not expensive, i.e., one that is
9780 most probably expanded inline into reasonably simple code. This is a
9781 superset of is_simple_builtin. */
9782 bool
9783 is_inexpensive_builtin (tree decl)
9784 {
9785 if (!decl)
9786 return false;
9787 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
9788 return true;
9789 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
9790 switch (DECL_FUNCTION_CODE (decl))
9791 {
9792 case BUILT_IN_ABS:
9793 case BUILT_IN_ALLOCA:
9794 case BUILT_IN_ALLOCA_WITH_ALIGN:
9795 case BUILT_IN_BSWAP16:
9796 case BUILT_IN_BSWAP32:
9797 case BUILT_IN_BSWAP64:
9798 case BUILT_IN_CLZ:
9799 case BUILT_IN_CLZIMAX:
9800 case BUILT_IN_CLZL:
9801 case BUILT_IN_CLZLL:
9802 case BUILT_IN_CTZ:
9803 case BUILT_IN_CTZIMAX:
9804 case BUILT_IN_CTZL:
9805 case BUILT_IN_CTZLL:
9806 case BUILT_IN_FFS:
9807 case BUILT_IN_FFSIMAX:
9808 case BUILT_IN_FFSL:
9809 case BUILT_IN_FFSLL:
9810 case BUILT_IN_IMAXABS:
9811 case BUILT_IN_FINITE:
9812 case BUILT_IN_FINITEF:
9813 case BUILT_IN_FINITEL:
9814 case BUILT_IN_FINITED32:
9815 case BUILT_IN_FINITED64:
9816 case BUILT_IN_FINITED128:
9817 case BUILT_IN_FPCLASSIFY:
9818 case BUILT_IN_ISFINITE:
9819 case BUILT_IN_ISINF_SIGN:
9820 case BUILT_IN_ISINF:
9821 case BUILT_IN_ISINFF:
9822 case BUILT_IN_ISINFL:
9823 case BUILT_IN_ISINFD32:
9824 case BUILT_IN_ISINFD64:
9825 case BUILT_IN_ISINFD128:
9826 case BUILT_IN_ISNAN:
9827 case BUILT_IN_ISNANF:
9828 case BUILT_IN_ISNANL:
9829 case BUILT_IN_ISNAND32:
9830 case BUILT_IN_ISNAND64:
9831 case BUILT_IN_ISNAND128:
9832 case BUILT_IN_ISNORMAL:
9833 case BUILT_IN_ISGREATER:
9834 case BUILT_IN_ISGREATEREQUAL:
9835 case BUILT_IN_ISLESS:
9836 case BUILT_IN_ISLESSEQUAL:
9837 case BUILT_IN_ISLESSGREATER:
9838 case BUILT_IN_ISUNORDERED:
9839 case BUILT_IN_VA_ARG_PACK:
9840 case BUILT_IN_VA_ARG_PACK_LEN:
9841 case BUILT_IN_VA_COPY:
9842 case BUILT_IN_TRAP:
9843 case BUILT_IN_SAVEREGS:
9844 case BUILT_IN_POPCOUNTL:
9845 case BUILT_IN_POPCOUNTLL:
9846 case BUILT_IN_POPCOUNTIMAX:
9847 case BUILT_IN_POPCOUNT:
9848 case BUILT_IN_PARITYL:
9849 case BUILT_IN_PARITYLL:
9850 case BUILT_IN_PARITYIMAX:
9851 case BUILT_IN_PARITY:
9852 case BUILT_IN_LABS:
9853 case BUILT_IN_LLABS:
9854 case BUILT_IN_PREFETCH:
9855 case BUILT_IN_ACC_ON_DEVICE:
9856 return true;
9857
9858 default:
9859 return is_simple_builtin (decl);
9860 }
9861
9862 return false;
9863 }
9864
9865 /* Return true if T is a constant and the value cast to a target char
9866 can be represented by a host char.
9867 Store the cast char constant in *P if so. */
9868
9869 bool
9870 target_char_cst_p (tree t, char *p)
9871 {
9872 if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
9873 return false;
9874
9875 *p = (char)tree_to_uhwi (t);
9876 return true;
9877 }