re PR middle-end/77624 (ICE on x86_64-linux-gnu (internal compiler error: in fold_bui...
1 /* Expand builtin functions.
2 Copyright (C) 1988-2016 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* Legacy warning! Please add no further builtin simplifications here
21 (apart from pure constant folding) - builtin simplifications should go
22 to match.pd or gimple-fold.c instead. */
23
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "backend.h"
28 #include "target.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "gimple.h"
32 #include "predict.h"
33 #include "tm_p.h"
34 #include "stringpool.h"
35 #include "tree-vrp.h"
36 #include "tree-ssanames.h"
37 #include "expmed.h"
38 #include "optabs.h"
39 #include "emit-rtl.h"
40 #include "recog.h"
41 #include "diagnostic-core.h"
42 #include "alias.h"
43 #include "fold-const.h"
44 #include "fold-const-call.h"
45 #include "stor-layout.h"
46 #include "calls.h"
47 #include "varasm.h"
48 #include "tree-object-size.h"
49 #include "realmpfr.h"
50 #include "cfgrtl.h"
51 #include "except.h"
52 #include "dojump.h"
53 #include "explow.h"
54 #include "stmt.h"
55 #include "expr.h"
56 #include "libfuncs.h"
57 #include "output.h"
58 #include "typeclass.h"
59 #include "langhooks.h"
60 #include "value-prof.h"
61 #include "builtins.h"
62 #include "asan.h"
63 #include "cilk.h"
64 #include "tree-chkp.h"
65 #include "rtl-chkp.h"
66 #include "internal-fn.h"
67 #include "case-cfn-macros.h"
68 #include "gimple-fold.h"
69
70
71 struct target_builtins default_target_builtins;
72 #if SWITCHABLE_TARGET
73 struct target_builtins *this_target_builtins = &default_target_builtins;
74 #endif
75
76 /* Define the names of the builtin function types and codes. */
77 const char *const built_in_class_names[BUILT_IN_LAST]
78 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
79
80 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
81 const char * built_in_names[(int) END_BUILTINS] =
82 {
83 #include "builtins.def"
84 };
85
86 /* Set up an array of builtin_info_type, making sure each element's decl is
87 initialized to NULL_TREE. */
88 builtin_info_type builtin_info[(int)END_BUILTINS];
89
90 /* Non-zero if __builtin_constant_p should be folded right away. */
91 bool force_folding_builtin_constant_p;
92
93 static rtx c_readstr (const char *, machine_mode);
94 static int target_char_cast (tree, char *);
95 static rtx get_memory_rtx (tree, tree);
96 static int apply_args_size (void);
97 static int apply_result_size (void);
98 static rtx result_vector (int, rtx);
99 static void expand_builtin_prefetch (tree);
100 static rtx expand_builtin_apply_args (void);
101 static rtx expand_builtin_apply_args_1 (void);
102 static rtx expand_builtin_apply (rtx, rtx, rtx);
103 static void expand_builtin_return (rtx);
104 static enum type_class type_to_class (tree);
105 static rtx expand_builtin_classify_type (tree);
106 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
107 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
108 static rtx expand_builtin_interclass_mathfn (tree, rtx);
109 static rtx expand_builtin_sincos (tree);
110 static rtx expand_builtin_cexpi (tree, rtx);
111 static rtx expand_builtin_int_roundingfn (tree, rtx);
112 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
113 static rtx expand_builtin_next_arg (void);
114 static rtx expand_builtin_va_start (tree);
115 static rtx expand_builtin_va_end (tree);
116 static rtx expand_builtin_va_copy (tree);
117 static rtx expand_builtin_strcmp (tree, rtx);
118 static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
119 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
120 static rtx expand_builtin_memcpy (tree, rtx);
121 static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
122 static rtx expand_builtin_memcpy_args (tree, tree, tree, rtx, tree);
123 static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
124 static rtx expand_builtin_mempcpy_with_bounds (tree, rtx, machine_mode);
125 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
126 machine_mode, int, tree);
127 static rtx expand_builtin_strcpy (tree, rtx);
128 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
129 static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
130 static rtx expand_builtin_strncpy (tree, rtx);
131 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
132 static rtx expand_builtin_memset (tree, rtx, machine_mode);
133 static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
134 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
135 static rtx expand_builtin_bzero (tree);
136 static rtx expand_builtin_strlen (tree, rtx, machine_mode);
137 static rtx expand_builtin_alloca (tree, bool);
138 static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
139 static rtx expand_builtin_frame_address (tree, tree);
140 static tree stabilize_va_list_loc (location_t, tree, int);
141 static rtx expand_builtin_expect (tree, rtx);
142 static tree fold_builtin_constant_p (tree);
143 static tree fold_builtin_classify_type (tree);
144 static tree fold_builtin_strlen (location_t, tree, tree);
145 static tree fold_builtin_inf (location_t, tree, int);
146 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
147 static bool validate_arg (const_tree, enum tree_code code);
148 static rtx expand_builtin_fabs (tree, rtx, rtx);
149 static rtx expand_builtin_signbit (tree, rtx);
150 static tree fold_builtin_strchr (location_t, tree, tree, tree);
151 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
152 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
153 static tree fold_builtin_strcmp (location_t, tree, tree);
154 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
155 static tree fold_builtin_isascii (location_t, tree);
156 static tree fold_builtin_toascii (location_t, tree);
157 static tree fold_builtin_isdigit (location_t, tree);
158 static tree fold_builtin_fabs (location_t, tree, tree);
159 static tree fold_builtin_abs (location_t, tree, tree);
160 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
161 enum tree_code);
162 static tree fold_builtin_0 (location_t, tree);
163 static tree fold_builtin_1 (location_t, tree, tree);
164 static tree fold_builtin_2 (location_t, tree, tree, tree);
165 static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
166 static tree fold_builtin_varargs (location_t, tree, tree*, int);
167
168 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
169 static tree fold_builtin_strstr (location_t, tree, tree, tree);
170 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
171 static tree fold_builtin_strspn (location_t, tree, tree);
172 static tree fold_builtin_strcspn (location_t, tree, tree);
173
174 static rtx expand_builtin_object_size (tree);
175 static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
176 enum built_in_function);
177 static void maybe_emit_chk_warning (tree, enum built_in_function);
178 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
179 static void maybe_emit_free_warning (tree);
180 static tree fold_builtin_object_size (tree, tree);
181
182 unsigned HOST_WIDE_INT target_newline;
183 unsigned HOST_WIDE_INT target_percent;
184 static unsigned HOST_WIDE_INT target_c;
185 static unsigned HOST_WIDE_INT target_s;
186 char target_percent_c[3];
187 char target_percent_s[3];
188 char target_percent_s_newline[4];
189 static tree do_mpfr_remquo (tree, tree, tree);
190 static tree do_mpfr_lgamma_r (tree, tree, tree);
191 static void expand_builtin_sync_synchronize (void);
192
193 /* Return true if NAME starts with __builtin_, __sync_ or __atomic_. */
194
195 static bool
196 is_builtin_name (const char *name)
197 {
198 if (strncmp (name, "__builtin_", 10) == 0)
199 return true;
200 if (strncmp (name, "__sync_", 7) == 0)
201 return true;
202 if (strncmp (name, "__atomic_", 9) == 0)
203 return true;
204 if (flag_cilkplus
205 && (!strcmp (name, "__cilkrts_detach")
206 || !strcmp (name, "__cilkrts_pop_frame")))
207 return true;
208 return false;
209 }
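
/* A minimal illustration of the prefix matching above (the __cilkrts_
   names are only recognized when -fcilkplus is in effect):

     is_builtin_name ("__builtin_memcpy")      => true
     is_builtin_name ("__sync_fetch_and_add")  => true
     is_builtin_name ("__atomic_load_n")       => true
     is_builtin_name ("memcpy")                => false  */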
210
211
212 /* Return true if DECL is a function symbol representing a built-in. */
213
214 bool
215 is_builtin_fn (tree decl)
216 {
217 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
218 }
219
220 /* Return true if NODE should be considered for inline expansion regardless
221 of the optimization level. This is the case whenever a function is invoked
222 with its "internal" name, which normally contains the prefix "__builtin". */
223
224 bool
225 called_as_built_in (tree node)
226 {
227 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
228 we want the name used to call the function, not the name it
229 will have. */
230 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
231 return is_builtin_name (name);
232 }
233
234 /* Compute values M and N such that M divides (address of EXP - N) and such
235 that N < M. If these numbers can be determined, store M in *ALIGNP and N in
236 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
237 *ALIGNP and any bit-offset to *BITPOSP.
238
239 Note that the address (and thus the alignment) computed here is based
240 on the address to which a symbol resolves, whereas DECL_ALIGN is based
241 on the address at which an object is actually located. These two
242 addresses are not always the same. For example, on ARM targets,
243 the address &foo of a Thumb function foo() has the lowest bit set,
244 whereas foo() itself starts on an even address.
245
246 If ADDR_P is true we are taking the address of the memory reference EXP
247 and thus cannot rely on the access taking place. */
248
249 static bool
250 get_object_alignment_2 (tree exp, unsigned int *alignp,
251 unsigned HOST_WIDE_INT *bitposp, bool addr_p)
252 {
253 HOST_WIDE_INT bitsize, bitpos;
254 tree offset;
255 machine_mode mode;
256 int unsignedp, reversep, volatilep;
257 unsigned int align = BITS_PER_UNIT;
258 bool known_alignment = false;
259
260 /* Get the innermost object and the constant (bitpos) and possibly
261 variable (offset) offset of the access. */
262 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
263 &unsignedp, &reversep, &volatilep);
264
265 /* Extract alignment information from the innermost object and
266 possibly adjust bitpos and offset. */
267 if (TREE_CODE (exp) == FUNCTION_DECL)
268 {
269 /* Function addresses can encode extra information besides their
270 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
271 allows the low bit to be used as a virtual bit, we know
272 that the address itself must be at least 2-byte aligned. */
273 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
274 align = 2 * BITS_PER_UNIT;
275 }
276 else if (TREE_CODE (exp) == LABEL_DECL)
277 ;
278 else if (TREE_CODE (exp) == CONST_DECL)
279 {
280 /* The alignment of a CONST_DECL is determined by its initializer. */
281 exp = DECL_INITIAL (exp);
282 align = TYPE_ALIGN (TREE_TYPE (exp));
283 if (CONSTANT_CLASS_P (exp))
284 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
285
286 known_alignment = true;
287 }
288 else if (DECL_P (exp))
289 {
290 align = DECL_ALIGN (exp);
291 known_alignment = true;
292 }
293 else if (TREE_CODE (exp) == INDIRECT_REF
294 || TREE_CODE (exp) == MEM_REF
295 || TREE_CODE (exp) == TARGET_MEM_REF)
296 {
297 tree addr = TREE_OPERAND (exp, 0);
298 unsigned ptr_align;
299 unsigned HOST_WIDE_INT ptr_bitpos;
300 unsigned HOST_WIDE_INT ptr_bitmask = ~0;
301
302 /* If the address is explicitly aligned, handle that. */
303 if (TREE_CODE (addr) == BIT_AND_EXPR
304 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
305 {
306 ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
307 ptr_bitmask *= BITS_PER_UNIT;
308 align = least_bit_hwi (ptr_bitmask);
309 addr = TREE_OPERAND (addr, 0);
310 }
311
312 known_alignment
313 = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
314 align = MAX (ptr_align, align);
315
316 /* Re-apply explicit alignment to the bitpos. */
317 ptr_bitpos &= ptr_bitmask;
318
319 /* The alignment of the pointer operand in a TARGET_MEM_REF
320 has to take the variable offset parts into account. */
321 if (TREE_CODE (exp) == TARGET_MEM_REF)
322 {
323 if (TMR_INDEX (exp))
324 {
325 unsigned HOST_WIDE_INT step = 1;
326 if (TMR_STEP (exp))
327 step = TREE_INT_CST_LOW (TMR_STEP (exp));
328 align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
329 }
330 if (TMR_INDEX2 (exp))
331 align = BITS_PER_UNIT;
332 known_alignment = false;
333 }
334
335 /* When EXP is an actual memory reference then we can use
336 TYPE_ALIGN of a pointer indirection to derive alignment.
337 Do so only if get_pointer_alignment_1 did not reveal absolute
338 alignment knowledge and if using that alignment would
339 improve the situation. */
340 if (!addr_p && !known_alignment
341 && TYPE_ALIGN (TREE_TYPE (exp)) > align)
342 align = TYPE_ALIGN (TREE_TYPE (exp));
343 else
344 {
345 /* Else adjust bitpos accordingly. */
346 bitpos += ptr_bitpos;
347 if (TREE_CODE (exp) == MEM_REF
348 || TREE_CODE (exp) == TARGET_MEM_REF)
349 bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
350 }
351 }
352 else if (TREE_CODE (exp) == STRING_CST)
353 {
354 /* STRING_CSTs are the only constant objects we allow not to be
355 wrapped inside a CONST_DECL. */
356 align = TYPE_ALIGN (TREE_TYPE (exp));
357 if (CONSTANT_CLASS_P (exp))
358 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
359
360 known_alignment = true;
361 }
362
363 /* If there is a non-constant offset part extract the maximum
364 alignment that can prevail. */
365 if (offset)
366 {
367 unsigned int trailing_zeros = tree_ctz (offset);
368 if (trailing_zeros < HOST_BITS_PER_INT)
369 {
370 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
371 if (inner)
372 align = MIN (align, inner);
373 }
374 }
375
376 *alignp = align;
377 *bitposp = bitpos & (*alignp - 1);
378 return known_alignment;
379 }
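
/* A worked example of the contract documented above get_object_alignment_2:
   for a COMPONENT_REF of a field at byte offset 2 inside a variable whose
   DECL_ALIGN is 64 bits, the code above produces *ALIGNP == 64 and
   *BITPOSP == 16, i.e. the object sits 16 bits past a 64-bit-aligned
   address.  The numbers are illustrative; they depend on the target.  */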
380
381 /* For a memory reference expression EXP compute values M and N such that M
382 divides (&EXP - N) and such that N < M. If these numbers can be determined,
383 store M in *ALIGNP and N in *BITPOSP and return true. Otherwise return false
384 and store BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP. */
385
386 bool
387 get_object_alignment_1 (tree exp, unsigned int *alignp,
388 unsigned HOST_WIDE_INT *bitposp)
389 {
390 return get_object_alignment_2 (exp, alignp, bitposp, false);
391 }
392
393 /* Return the alignment in bits of EXP, an object. */
394
395 unsigned int
396 get_object_alignment (tree exp)
397 {
398 unsigned HOST_WIDE_INT bitpos = 0;
399 unsigned int align;
400
401 get_object_alignment_1 (exp, &align, &bitpos);
402
403 /* align and bitpos now specify known low bits of the pointer.
404 ptr & (align - 1) == bitpos. */
405
406 if (bitpos != 0)
407 align = least_bit_hwi (bitpos);
408 return align;
409 }
410
411 /* For a pointer valued expression EXP compute values M and N such that M
412 divides (EXP - N) and such that N < M. If these numbers can be determined,
413 store M in *ALIGNP and N in *BITPOSP and return true. Return false if
414 the results are just a conservative approximation.
415
416 If EXP is not a pointer, false is returned too. */
417
418 bool
419 get_pointer_alignment_1 (tree exp, unsigned int *alignp,
420 unsigned HOST_WIDE_INT *bitposp)
421 {
422 STRIP_NOPS (exp);
423
424 if (TREE_CODE (exp) == ADDR_EXPR)
425 return get_object_alignment_2 (TREE_OPERAND (exp, 0),
426 alignp, bitposp, true);
427 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
428 {
429 unsigned int align;
430 unsigned HOST_WIDE_INT bitpos;
431 bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
432 &align, &bitpos);
433 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
434 bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
435 else
436 {
437 unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
438 if (trailing_zeros < HOST_BITS_PER_INT)
439 {
440 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
441 if (inner)
442 align = MIN (align, inner);
443 }
444 }
445 *alignp = align;
446 *bitposp = bitpos & (align - 1);
447 return res;
448 }
449 else if (TREE_CODE (exp) == SSA_NAME
450 && POINTER_TYPE_P (TREE_TYPE (exp)))
451 {
452 unsigned int ptr_align, ptr_misalign;
453 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
454
455 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
456 {
457 *bitposp = ptr_misalign * BITS_PER_UNIT;
458 *alignp = ptr_align * BITS_PER_UNIT;
459 /* Make sure to return a sensible alignment when the multiplication
460 by BITS_PER_UNIT overflowed. */
461 if (*alignp == 0)
462 *alignp = 1u << (HOST_BITS_PER_INT - 1);
463 /* We cannot really tell whether this result is an approximation. */
464 return false;
465 }
466 else
467 {
468 *bitposp = 0;
469 *alignp = BITS_PER_UNIT;
470 return false;
471 }
472 }
473 else if (TREE_CODE (exp) == INTEGER_CST)
474 {
475 *alignp = BIGGEST_ALIGNMENT;
476 *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
477 & (BIGGEST_ALIGNMENT - 1));
478 return true;
479 }
480
481 *bitposp = 0;
482 *alignp = BITS_PER_UNIT;
483 return false;
484 }
485
486 /* Return the alignment in bits of EXP, a pointer valued expression.
487 The alignment returned is, by default, the alignment of the thing that
488 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
489
490 Otherwise, look at the expression to see if we can do better, i.e., if the
491 expression is actually pointing at an object whose alignment is tighter. */
492
493 unsigned int
494 get_pointer_alignment (tree exp)
495 {
496 unsigned HOST_WIDE_INT bitpos = 0;
497 unsigned int align;
498
499 get_pointer_alignment_1 (exp, &align, &bitpos);
500
501 /* align and bitpos now specify known low bits of the pointer.
502 ptr & (align - 1) == bitpos. */
503
504 if (bitpos != 0)
505 align = least_bit_hwi (bitpos);
506
507 return align;
508 }
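
/* An illustrative sketch, assuming BITS_PER_UNIT == 8 and a declaration
   with 16-byte alignment:

     static char buf[16] __attribute__ ((aligned (16)));

   get_pointer_alignment on the tree for &buf[0] returns 128 bits, while
   for &buf[3] the known misalignment is 24 bits and the result degrades
   to least_bit_hwi (24) == 8 bits.  */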
509
510 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
511 way to do it, because the string could contain a zero byte in the middle.
512 TREE_STRING_LENGTH is the size of the character array, not the string.
513
514 ONLY_VALUE should be nonzero if the result is not going to be emitted
515 into the instruction stream and zero if it is going to be expanded.
516 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
517 is returned, otherwise NULL, since
518 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
519 evaluate the side-effects.
520
521 If ONLY_VALUE is two then we do not emit warnings about out-of-bound
522 accesses. Note that this implies the result is not going to be emitted
523 into the instruction stream.
524
525 The value returned is of type `ssizetype'.
526
527 Unfortunately, string_constant can't access the values of const char
528 arrays with initializers, so neither can we do so here. */
529
530 tree
531 c_strlen (tree src, int only_value)
532 {
533 tree offset_node;
534 HOST_WIDE_INT offset;
535 int max;
536 const char *ptr;
537 location_t loc;
538
539 STRIP_NOPS (src);
540 if (TREE_CODE (src) == COND_EXPR
541 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
542 {
543 tree len1, len2;
544
545 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
546 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
547 if (tree_int_cst_equal (len1, len2))
548 return len1;
549 }
550
551 if (TREE_CODE (src) == COMPOUND_EXPR
552 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
553 return c_strlen (TREE_OPERAND (src, 1), only_value);
554
555 loc = EXPR_LOC_OR_LOC (src, input_location);
556
557 src = string_constant (src, &offset_node);
558 if (src == 0)
559 return NULL_TREE;
560
561 max = TREE_STRING_LENGTH (src) - 1;
562 ptr = TREE_STRING_POINTER (src);
563
564 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
565 {
566 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
567 compute the offset to the following null if we don't know where to
568 start searching for it. */
569 int i;
570
571 for (i = 0; i < max; i++)
572 if (ptr[i] == 0)
573 return NULL_TREE;
574
575 /* We don't know the starting offset, but we do know that the string
576 has no internal zero bytes. We can assume that the offset falls
577 within the bounds of the string; otherwise, the programmer deserves
578 what he gets. Subtract the offset from the length of the string,
579 and return that. This would perhaps not be valid if we were dealing
580 with named arrays in addition to literal string constants. */
581
582 return size_diffop_loc (loc, size_int (max), offset_node);
583 }
584
585 /* We have a known offset into the string. Start searching there for
586 a null character if we can represent it as a single HOST_WIDE_INT. */
587 if (offset_node == 0)
588 offset = 0;
589 else if (! tree_fits_shwi_p (offset_node))
590 offset = -1;
591 else
592 offset = tree_to_shwi (offset_node);
593
594 /* If the offset is known to be out of bounds, warn, and call strlen at
595 runtime. */
596 if (offset < 0 || offset > max)
597 {
598 /* Suppress multiple warnings for propagated constant strings. */
599 if (only_value != 2
600 && !TREE_NO_WARNING (src))
601 {
602 warning_at (loc, 0, "offset outside bounds of constant string");
603 TREE_NO_WARNING (src) = 1;
604 }
605 return NULL_TREE;
606 }
607
608 /* Use strlen to search for the first zero byte. Since any strings
609 constructed with build_string will have nulls appended, we win even
610 if we get handed something like (char[4])"abcd".
611
612 Since OFFSET is our starting index into the string, no further
613 calculation is needed. */
614 return ssize_int (strlen (ptr + offset));
615 }
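
/* Some illustrative results of the folding above (sketch):

     c_strlen on the STRING_CST "hello"                 => ssize_int (5)
     c_strlen on "hello" with a constant offset of 2    => ssize_int (3)
     c_strlen on "foo\0bar" with a non-constant offset  => NULL_TREE,
       because of the embedded nul byte.  */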
616
617 /* Return a constant integer corresponding to target reading
618 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
619
620 static rtx
621 c_readstr (const char *str, machine_mode mode)
622 {
623 HOST_WIDE_INT ch;
624 unsigned int i, j;
625 HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];
626
627 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
628 unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
629 / HOST_BITS_PER_WIDE_INT;
630
631 gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
632 for (i = 0; i < len; i++)
633 tmp[i] = 0;
634
635 ch = 1;
636 for (i = 0; i < GET_MODE_SIZE (mode); i++)
637 {
638 j = i;
639 if (WORDS_BIG_ENDIAN)
640 j = GET_MODE_SIZE (mode) - i - 1;
641 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
642 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
643 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
644 j *= BITS_PER_UNIT;
645
646 if (ch)
647 ch = (unsigned char) str[i];
648 tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
649 }
650
651 wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
652 return immed_wide_int_const (c, mode);
653 }
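
/* For example, on a target where BYTES_BIG_ENDIAN and WORDS_BIG_ENDIAN are
   both false, c_readstr ("abcd", SImode) yields the constant 0x64636261,
   and c_readstr ("ab", SImode) yields 0x00006261, since bytes at and after
   the terminating nul are zero-filled.  Illustrative values only.  */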
654
655 /* Cast a target constant CST to target CHAR and, if that value fits into
656 the host char type, return zero and store that value in the variable
657 pointed to by P. */
658
659 static int
660 target_char_cast (tree cst, char *p)
661 {
662 unsigned HOST_WIDE_INT val, hostval;
663
664 if (TREE_CODE (cst) != INTEGER_CST
665 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
666 return 1;
667
668 /* Do not care if it fits or not right here. */
669 val = TREE_INT_CST_LOW (cst);
670
671 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
672 val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;
673
674 hostval = val;
675 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
676 hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;
677
678 if (val != hostval)
679 return 1;
680
681 *p = hostval;
682 return 0;
683 }
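
/* Illustrative behaviour, assuming CHAR_TYPE_SIZE == HOST_BITS_PER_CHAR == 8:

     char c;
     target_char_cast (build_int_cst (char_type_node, 'x'), &c)

   returns 0 and stores 'x' in C.  On a hypothetical target whose char is
   wider than the host char, a constant such as 0x161 would lose bits when
   truncated to the host char and the function would return 1 instead.  */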
684
685 /* Similar to save_expr, but assumes that arbitrary code is not executed
686 in between the multiple evaluations. In particular, we assume that a
687 non-addressable local variable will not be modified. */
688
689 static tree
690 builtin_save_expr (tree exp)
691 {
692 if (TREE_CODE (exp) == SSA_NAME
693 || (TREE_ADDRESSABLE (exp) == 0
694 && (TREE_CODE (exp) == PARM_DECL
695 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
696 return exp;
697
698 return save_expr (exp);
699 }
700
701 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
702 times to get the address of either a higher stack frame, or a return
703 address located within it (depending on FNDECL_CODE). */
704
705 static rtx
706 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
707 {
708 int i;
709 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
710 if (tem == NULL_RTX)
711 {
712 /* For a zero count with __builtin_return_address, we don't care what
713 frame address we return, because target-specific definitions will
714 override us. Therefore frame pointer elimination is OK, and using
715 the soft frame pointer is OK.
716
717 For a nonzero count, or a zero count with __builtin_frame_address,
718 we require a stable offset from the current frame pointer to the
719 previous one, so we must use the hard frame pointer, and
720 we must disable frame pointer elimination. */
721 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
722 tem = frame_pointer_rtx;
723 else
724 {
725 tem = hard_frame_pointer_rtx;
726
727 /* Tell reload not to eliminate the frame pointer. */
728 crtl->accesses_prior_frames = 1;
729 }
730 }
731
732 if (count > 0)
733 SETUP_FRAME_ADDRESSES ();
734
735 /* On the SPARC, the return address is not in the frame, it is in a
736 register. There is no way to access it off of the current frame
737 pointer, but it can be accessed off the previous frame pointer by
738 reading the value from the register window save area. */
739 if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
740 count--;
741
742 /* Scan back COUNT frames to the specified frame. */
743 for (i = 0; i < count; i++)
744 {
745 /* Assume the dynamic chain pointer is in the word that the
746 frame address points to, unless otherwise specified. */
747 tem = DYNAMIC_CHAIN_ADDRESS (tem);
748 tem = memory_address (Pmode, tem);
749 tem = gen_frame_mem (Pmode, tem);
750 tem = copy_to_reg (tem);
751 }
752
753 /* For __builtin_frame_address, return what we've got. But, on
754 the SPARC for example, we may have to add a bias. */
755 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
756 return FRAME_ADDR_RTX (tem);
757
758 /* For __builtin_return_address, get the return address from that frame. */
759 #ifdef RETURN_ADDR_RTX
760 tem = RETURN_ADDR_RTX (count, tem);
761 #else
762 tem = memory_address (Pmode,
763 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
764 tem = gen_frame_mem (Pmode, tem);
765 #endif
766 return tem;
767 }
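
/* Typical source-level uses that reach the expander above (sketch; COUNT
   must be a constant integer):

     void *ra = __builtin_return_address (0);   -- return address of this frame
     void *fp = __builtin_frame_address (1);    -- frame address of the caller  */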
768
769 /* Alias set used for setjmp buffer. */
770 static alias_set_type setjmp_alias_set = -1;
771
772 /* Construct the leading half of a __builtin_setjmp call. Control will
773 return to RECEIVER_LABEL. This is also called directly by the SJLJ
774 exception handling code. */
775
776 void
777 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
778 {
779 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
780 rtx stack_save;
781 rtx mem;
782
783 if (setjmp_alias_set == -1)
784 setjmp_alias_set = new_alias_set ();
785
786 buf_addr = convert_memory_address (Pmode, buf_addr);
787
788 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
789
790 /* We store the frame pointer and the address of receiver_label in
791 the buffer and use the rest of it for the stack save area, which
792 is machine-dependent. */
793
794 mem = gen_rtx_MEM (Pmode, buf_addr);
795 set_mem_alias_set (mem, setjmp_alias_set);
796 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
797
798 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
799 GET_MODE_SIZE (Pmode))),
800 set_mem_alias_set (mem, setjmp_alias_set);
801
802 emit_move_insn (validize_mem (mem),
803 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
804
805 stack_save = gen_rtx_MEM (sa_mode,
806 plus_constant (Pmode, buf_addr,
807 2 * GET_MODE_SIZE (Pmode)));
808 set_mem_alias_set (stack_save, setjmp_alias_set);
809 emit_stack_save (SAVE_NONLOCAL, &stack_save);
810
811 /* If there is further processing to do, do it. */
812 if (targetm.have_builtin_setjmp_setup ())
813 emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));
814
815 /* We have a nonlocal label. */
816 cfun->has_nonlocal_label = 1;
817 }
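
/* The buffer layout produced above, in Pmode-sized words starting at
   BUF_ADDR, is:

     word 0         the value of targetm.builtin_setjmp_frame_value ()
     word 1         the address of RECEIVER_LABEL
     word 2 ...     the SAVE_NONLOCAL stack save area

   This merely summarizes the stores emitted above; it is not a target
   ABI guarantee.  */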
818
819 /* Construct the trailing part of a __builtin_setjmp call. This is
820 also called directly by the SJLJ exception handling code.
821 If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler. */
822
823 void
824 expand_builtin_setjmp_receiver (rtx receiver_label)
825 {
826 rtx chain;
827
828 /* Mark the FP as used when we get here, so we have to make sure it's
829 marked as used by this function. */
830 emit_use (hard_frame_pointer_rtx);
831
832 /* Mark the static chain as clobbered here so life information
833 doesn't get messed up for it. */
834 chain = targetm.calls.static_chain (current_function_decl, true);
835 if (chain && REG_P (chain))
836 emit_clobber (chain);
837
838 /* Now put in the code to restore the frame pointer, and argument
839 pointer, if needed. */
840 if (! targetm.have_nonlocal_goto ())
841 {
842 /* First adjust our frame pointer to its actual value. It was
843 previously set to the start of the virtual area corresponding to
844 the stacked variables when we branched here and now needs to be
845 adjusted to the actual hardware fp value.
846
847 Assignments to virtual registers are converted by
848 instantiate_virtual_regs into the corresponding assignment
849 to the underlying register (fp in this case) that makes
850 the original assignment true.
851 So the following insn will actually be decrementing fp by
852 STARTING_FRAME_OFFSET. */
853 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
854
855 /* Restoring the frame pointer also modifies the hard frame pointer.
856 Mark it used (so that the previous assignment remains live once
857 the frame pointer is eliminated) and clobbered (to represent the
858 implicit update from the assignment). */
859 emit_use (hard_frame_pointer_rtx);
860 emit_clobber (hard_frame_pointer_rtx);
861 }
862
863 if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
864 {
865 /* If the argument pointer can be eliminated in favor of the
866 frame pointer, we don't need to restore it. We assume here
867 that if such an elimination is present, it can always be used.
868 This is the case on all known machines; if we don't make this
869 assumption, we do unnecessary saving on many machines. */
870 size_t i;
871 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
872
873 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
874 if (elim_regs[i].from == ARG_POINTER_REGNUM
875 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
876 break;
877
878 if (i == ARRAY_SIZE (elim_regs))
879 {
880 /* Now restore our arg pointer from the address at which it
881 was saved in our stack frame. */
882 emit_move_insn (crtl->args.internal_arg_pointer,
883 copy_to_reg (get_arg_pointer_save_area ()));
884 }
885 }
886
887 if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
888 emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
889 else if (targetm.have_nonlocal_goto_receiver ())
890 emit_insn (targetm.gen_nonlocal_goto_receiver ());
891 else
892 { /* Nothing */ }
893
894 /* We must not allow the code we just generated to be reordered by
895 scheduling. Specifically, the update of the frame pointer must
896 happen immediately, not later. */
897 emit_insn (gen_blockage ());
898 }
899
900 /* __builtin_longjmp is passed a pointer to an array of five words (not
901 all will be used on all machines). It operates similarly to the C
902 library function of the same name, but is more efficient. Much of
903 the code below is copied from the handling of non-local gotos. */
904
905 static void
906 expand_builtin_longjmp (rtx buf_addr, rtx value)
907 {
908 rtx fp, lab, stack;
909 rtx_insn *insn, *last;
910 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
911
912 /* DRAP is needed for stack realign if longjmp is expanded to current
913 function */
914 if (SUPPORTS_STACK_ALIGNMENT)
915 crtl->need_drap = true;
916
917 if (setjmp_alias_set == -1)
918 setjmp_alias_set = new_alias_set ();
919
920 buf_addr = convert_memory_address (Pmode, buf_addr);
921
922 buf_addr = force_reg (Pmode, buf_addr);
923
924 /* We require that the user must pass a second argument of 1, because
925 that is what builtin_setjmp will return. */
926 gcc_assert (value == const1_rtx);
927
928 last = get_last_insn ();
929 if (targetm.have_builtin_longjmp ())
930 emit_insn (targetm.gen_builtin_longjmp (buf_addr));
931 else
932 {
933 fp = gen_rtx_MEM (Pmode, buf_addr);
934 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
935 GET_MODE_SIZE (Pmode)));
936
937 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
938 2 * GET_MODE_SIZE (Pmode)));
939 set_mem_alias_set (fp, setjmp_alias_set);
940 set_mem_alias_set (lab, setjmp_alias_set);
941 set_mem_alias_set (stack, setjmp_alias_set);
942
943 /* Pick up FP, label, and SP from the block and jump. This code is
944 from expand_goto in stmt.c; see there for detailed comments. */
945 if (targetm.have_nonlocal_goto ())
946 /* We have to pass a value to the nonlocal_goto pattern that will
947 get copied into the static_chain pointer, but it does not matter
948 what that value is, because builtin_setjmp does not use it. */
949 emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
950 else
951 {
952 lab = copy_to_reg (lab);
953
954 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
955 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
956
957 emit_move_insn (hard_frame_pointer_rtx, fp);
958 emit_stack_restore (SAVE_NONLOCAL, stack);
959
960 emit_use (hard_frame_pointer_rtx);
961 emit_use (stack_pointer_rtx);
962 emit_indirect_jump (lab);
963 }
964 }
965
966 /* Search backwards and mark the jump insn as a non-local goto.
967 Note that this precludes the use of __builtin_longjmp to a
968 __builtin_setjmp target in the same function. However, we've
969 already cautioned the user that these functions are for
970 internal exception handling use only. */
971 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
972 {
973 gcc_assert (insn != last);
974
975 if (JUMP_P (insn))
976 {
977 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
978 break;
979 }
980 else if (CALL_P (insn))
981 break;
982 }
983 }
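
/* A minimal usage sketch.  As cautioned above, these builtins are intended
   for internal exception-handling use only, the buffer is an array of five
   words, and the second __builtin_longjmp argument must be the literal 1:

     void *jmp_buffer[5];    -- hypothetical, must be visible to both sides

     if (__builtin_setjmp (jmp_buffer) == 0)
       normal_path ();       -- placeholder
     else
       unwind_path ();       -- placeholder, reached via the longjmp

   and, from a callee rather than from the function containing the setjmp:

     __builtin_longjmp (jmp_buffer, 1);  */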
984
985 static inline bool
986 more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
987 {
988 return (iter->i < iter->n);
989 }
990
991 /* This function validates the types of a function call argument list
992 against a specified list of tree_codes. If the last specifier is a 0,
993 that represents an ellipsis; otherwise the last specifier must be a
994 VOID_TYPE. */
995
996 static bool
997 validate_arglist (const_tree callexpr, ...)
998 {
999 enum tree_code code;
1000 bool res = 0;
1001 va_list ap;
1002 const_call_expr_arg_iterator iter;
1003 const_tree arg;
1004
1005 va_start (ap, callexpr);
1006 init_const_call_expr_arg_iterator (callexpr, &iter);
1007
1008 do
1009 {
1010 code = (enum tree_code) va_arg (ap, int);
1011 switch (code)
1012 {
1013 case 0:
1014 /* This signifies an ellipsis; any further arguments are all ok. */
1015 res = true;
1016 goto end;
1017 case VOID_TYPE:
1018 /* This signifies an endlink; if no arguments remain, return
1019 true, otherwise return false. */
1020 res = !more_const_call_expr_args_p (&iter);
1021 goto end;
1022 default:
1023 /* If no parameters remain or the parameter's code does not
1024 match the specified code, return false. Otherwise continue
1025 checking any remaining arguments. */
1026 arg = next_const_call_expr_arg (&iter);
1027 if (!validate_arg (arg, code))
1028 goto end;
1029 break;
1030 }
1031 }
1032 while (1);
1033
1034 /* We need gotos here since we can only have one VA_CLOSE in a
1035 function. */
1036 end: ;
1037 va_end (ap);
1038
1039 return res;
1040 }
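
/* For example, the expanders below check an exact two-pointer signature with

     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)

   whereas a trailing 0 instead of VOID_TYPE, as in

     validate_arglist (exp, POINTER_TYPE, 0)

   accepts any number of additional arguments after the first.  */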
1041
1042 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1043 and the address of the save area. */
1044
1045 static rtx
1046 expand_builtin_nonlocal_goto (tree exp)
1047 {
1048 tree t_label, t_save_area;
1049 rtx r_label, r_save_area, r_fp, r_sp;
1050 rtx_insn *insn;
1051
1052 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1053 return NULL_RTX;
1054
1055 t_label = CALL_EXPR_ARG (exp, 0);
1056 t_save_area = CALL_EXPR_ARG (exp, 1);
1057
1058 r_label = expand_normal (t_label);
1059 r_label = convert_memory_address (Pmode, r_label);
1060 r_save_area = expand_normal (t_save_area);
1061 r_save_area = convert_memory_address (Pmode, r_save_area);
1062 /* Copy the address of the save location to a register just in case it was
1063 based on the frame pointer. */
1064 r_save_area = copy_to_reg (r_save_area);
1065 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1066 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1067 plus_constant (Pmode, r_save_area,
1068 GET_MODE_SIZE (Pmode)));
1069
1070 crtl->has_nonlocal_goto = 1;
1071
1072 /* ??? We no longer need to pass the static chain value, afaik. */
1073 if (targetm.have_nonlocal_goto ())
1074 emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1075 else
1076 {
1077 r_label = copy_to_reg (r_label);
1078
1079 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1080 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1081
1082 /* Restore frame pointer for containing function. */
1083 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1084 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1085
1086 /* USE of hard_frame_pointer_rtx added for consistency;
1087 not clear if really needed. */
1088 emit_use (hard_frame_pointer_rtx);
1089 emit_use (stack_pointer_rtx);
1090
1091 /* If the architecture is using a GP register, we must
1092 conservatively assume that the target function makes use of it.
1093 The prologue of functions with nonlocal gotos must therefore
1094 initialize the GP register to the appropriate value, and we
1095 must then make sure that this value is live at the point
1096 of the jump. (Note that this doesn't necessarily apply
1097 to targets with a nonlocal_goto pattern; they are free
1098 to implement it in their own way. Note also that this is
1099 a no-op if the GP register is a global invariant.) */
1100 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
1101 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
1102 emit_use (pic_offset_table_rtx);
1103
1104 emit_indirect_jump (r_label);
1105 }
1106
1107 /* Search backwards to the jump insn and mark it as a
1108 non-local goto. */
1109 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1110 {
1111 if (JUMP_P (insn))
1112 {
1113 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1114 break;
1115 }
1116 else if (CALL_P (insn))
1117 break;
1118 }
1119
1120 return const0_rtx;
1121 }
1122
1123 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1124 (not all will be used on all machines) that was passed to __builtin_setjmp.
1125 It updates the stack pointer in that block to the current value. This is
1126 also called directly by the SJLJ exception handling code. */
1127
1128 void
1129 expand_builtin_update_setjmp_buf (rtx buf_addr)
1130 {
1131 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1132 rtx stack_save
1133 = gen_rtx_MEM (sa_mode,
1134 memory_address
1135 (sa_mode,
1136 plus_constant (Pmode, buf_addr,
1137 2 * GET_MODE_SIZE (Pmode))));
1138
1139 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1140 }
1141
1142 /* Expand a call to __builtin_prefetch. For a target that does not support
1143 data prefetch, evaluate the memory address argument in case it has side
1144 effects. */
1145
1146 static void
1147 expand_builtin_prefetch (tree exp)
1148 {
1149 tree arg0, arg1, arg2;
1150 int nargs;
1151 rtx op0, op1, op2;
1152
1153 if (!validate_arglist (exp, POINTER_TYPE, 0))
1154 return;
1155
1156 arg0 = CALL_EXPR_ARG (exp, 0);
1157
1158 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1159 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1160 locality). */
1161 nargs = call_expr_nargs (exp);
1162 if (nargs > 1)
1163 arg1 = CALL_EXPR_ARG (exp, 1);
1164 else
1165 arg1 = integer_zero_node;
1166 if (nargs > 2)
1167 arg2 = CALL_EXPR_ARG (exp, 2);
1168 else
1169 arg2 = integer_three_node;
1170
1171 /* Argument 0 is an address. */
1172 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1173
1174 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1175 if (TREE_CODE (arg1) != INTEGER_CST)
1176 {
1177 error ("second argument to %<__builtin_prefetch%> must be a constant");
1178 arg1 = integer_zero_node;
1179 }
1180 op1 = expand_normal (arg1);
1181 /* Argument 1 must be either zero or one. */
1182 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1183 {
1184 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1185 " using zero");
1186 op1 = const0_rtx;
1187 }
1188
1189 /* Argument 2 (locality) must be a compile-time constant int. */
1190 if (TREE_CODE (arg2) != INTEGER_CST)
1191 {
1192 error ("third argument to %<__builtin_prefetch%> must be a constant");
1193 arg2 = integer_zero_node;
1194 }
1195 op2 = expand_normal (arg2);
1196 /* Argument 2 must be 0, 1, 2, or 3. */
1197 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1198 {
1199 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1200 op2 = const0_rtx;
1201 }
1202
1203 if (targetm.have_prefetch ())
1204 {
1205 struct expand_operand ops[3];
1206
1207 create_address_operand (&ops[0], op0);
1208 create_integer_operand (&ops[1], INTVAL (op1));
1209 create_integer_operand (&ops[2], INTVAL (op2));
1210 if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
1211 return;
1212 }
1213
1214 /* Don't do anything with direct references to volatile memory, but
1215 generate code to handle other side effects. */
1216 if (!MEM_P (op0) && side_effects_p (op0))
1217 emit_insn (op0);
1218 }
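
/* Source-level examples of the argument conventions handled above (P names
   some pointer in scope):

     __builtin_prefetch (p);         -- read, locality 3 (the defaults)
     __builtin_prefetch (p, 1, 0);   -- prefetch for write, no temporal locality  */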
1219
1220 /* Get a MEM rtx for expression EXP which is the address of an operand
1221 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1222 the maximum length of the block of memory that might be accessed or
1223 NULL if unknown. */
1224
1225 static rtx
1226 get_memory_rtx (tree exp, tree len)
1227 {
1228 tree orig_exp = exp;
1229 rtx addr, mem;
1230
1231 /* When EXP is not a resolved SAVE_EXPR, MEM_ATTRS can still be derived
1232 from its expression; for expr->a.b only <variable>.a.b is recorded. */
1233 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1234 exp = TREE_OPERAND (exp, 0);
1235
1236 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1237 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1238
1239 /* Get an expression we can use to find the attributes to assign to MEM.
1240 First remove any nops. */
1241 while (CONVERT_EXPR_P (exp)
1242 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1243 exp = TREE_OPERAND (exp, 0);
1244
1245 /* Build a MEM_REF representing the whole accessed area as a byte blob,
1246 (as builtin stringops may alias with anything). */
1247 exp = fold_build2 (MEM_REF,
1248 build_array_type (char_type_node,
1249 build_range_type (sizetype,
1250 size_one_node, len)),
1251 exp, build_int_cst (ptr_type_node, 0));
1252
1253 /* If the MEM_REF has no acceptable address, try to get the base object
1254 from the original address we got, and build an all-aliasing
1255 unknown-sized access to that one. */
1256 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1257 set_mem_attributes (mem, exp, 0);
1258 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1259 && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1260 0))))
1261 {
1262 exp = build_fold_addr_expr (exp);
1263 exp = fold_build2 (MEM_REF,
1264 build_array_type (char_type_node,
1265 build_range_type (sizetype,
1266 size_zero_node,
1267 NULL)),
1268 exp, build_int_cst (ptr_type_node, 0));
1269 set_mem_attributes (mem, exp, 0);
1270 }
1271 set_mem_alias_set (mem, 0);
1272 return mem;
1273 }
1274 \f
1275 /* Built-in functions to perform an untyped call and return. */
1276
1277 #define apply_args_mode \
1278 (this_target_builtins->x_apply_args_mode)
1279 #define apply_result_mode \
1280 (this_target_builtins->x_apply_result_mode)
1281
1282 /* Return the size required for the block returned by __builtin_apply_args,
1283 and initialize apply_args_mode. */
1284
1285 static int
1286 apply_args_size (void)
1287 {
1288 static int size = -1;
1289 int align;
1290 unsigned int regno;
1291 machine_mode mode;
1292
1293 /* The values computed by this function never change. */
1294 if (size < 0)
1295 {
1296 /* The first value is the incoming arg-pointer. */
1297 size = GET_MODE_SIZE (Pmode);
1298
1299 /* The second value is the structure value address unless this is
1300 passed as an "invisible" first argument. */
1301 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1302 size += GET_MODE_SIZE (Pmode);
1303
1304 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1305 if (FUNCTION_ARG_REGNO_P (regno))
1306 {
1307 mode = targetm.calls.get_raw_arg_mode (regno);
1308
1309 gcc_assert (mode != VOIDmode);
1310
1311 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1312 if (size % align != 0)
1313 size = CEIL (size, align) * align;
1314 size += GET_MODE_SIZE (mode);
1315 apply_args_mode[regno] = mode;
1316 }
1317 else
1318 {
1319 apply_args_mode[regno] = VOIDmode;
1320 }
1321 }
1322 return size;
1323 }
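
/* A purely illustrative instance of the size computation above: on a
   hypothetical 64-bit target with no separate struct-value register, two
   8-byte integer argument registers and two 16-byte vector argument
   registers, the block is 8 (arg pointer) + 16 (integer registers),
   padded to 32 for the vector alignment, + 32 (vector registers)
   = 64 bytes.  */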
1324
1325 /* Return the size required for the block returned by __builtin_apply,
1326 and initialize apply_result_mode. */
1327
1328 static int
1329 apply_result_size (void)
1330 {
1331 static int size = -1;
1332 int align, regno;
1333 machine_mode mode;
1334
1335 /* The values computed by this function never change. */
1336 if (size < 0)
1337 {
1338 size = 0;
1339
1340 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1341 if (targetm.calls.function_value_regno_p (regno))
1342 {
1343 mode = targetm.calls.get_raw_result_mode (regno);
1344
1345 gcc_assert (mode != VOIDmode);
1346
1347 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1348 if (size % align != 0)
1349 size = CEIL (size, align) * align;
1350 size += GET_MODE_SIZE (mode);
1351 apply_result_mode[regno] = mode;
1352 }
1353 else
1354 apply_result_mode[regno] = VOIDmode;
1355
1356 /* Allow targets that use untyped_call and untyped_return to override
1357 the size so that machine-specific information can be stored here. */
1358 #ifdef APPLY_RESULT_SIZE
1359 size = APPLY_RESULT_SIZE;
1360 #endif
1361 }
1362 return size;
1363 }
1364
1365 /* Create a vector describing the result block RESULT. If SAVEP is true,
1366 the result block is used to save the values; otherwise it is used to
1367 restore the values. */
1368
1369 static rtx
1370 result_vector (int savep, rtx result)
1371 {
1372 int regno, size, align, nelts;
1373 machine_mode mode;
1374 rtx reg, mem;
1375 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1376
1377 size = nelts = 0;
1378 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1379 if ((mode = apply_result_mode[regno]) != VOIDmode)
1380 {
1381 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1382 if (size % align != 0)
1383 size = CEIL (size, align) * align;
1384 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1385 mem = adjust_address (result, mode, size);
1386 savevec[nelts++] = (savep
1387 ? gen_rtx_SET (mem, reg)
1388 : gen_rtx_SET (reg, mem));
1389 size += GET_MODE_SIZE (mode);
1390 }
1391 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1392 }
1393
1394 /* Save the state required to perform an untyped call with the same
1395 arguments as were passed to the current function. */
1396
1397 static rtx
1398 expand_builtin_apply_args_1 (void)
1399 {
1400 rtx registers, tem;
1401 int size, align, regno;
1402 machine_mode mode;
1403 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1404
1405 /* Create a block where the arg-pointer, structure value address,
1406 and argument registers can be saved. */
1407 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1408
1409 /* Walk past the arg-pointer and structure value address. */
1410 size = GET_MODE_SIZE (Pmode);
1411 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1412 size += GET_MODE_SIZE (Pmode);
1413
1414 /* Save each register used in calling a function to the block. */
1415 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1416 if ((mode = apply_args_mode[regno]) != VOIDmode)
1417 {
1418 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1419 if (size % align != 0)
1420 size = CEIL (size, align) * align;
1421
1422 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1423
1424 emit_move_insn (adjust_address (registers, mode, size), tem);
1425 size += GET_MODE_SIZE (mode);
1426 }
1427
1428 /* Save the arg pointer to the block. */
1429 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1430 /* We need the pointer as the caller actually passed them to us, not
1431 as we might have pretended they were passed. Make sure it's a valid
1432 operand, as emit_move_insn isn't expected to handle a PLUS. */
1433 if (STACK_GROWS_DOWNWARD)
1434 tem
1435 = force_operand (plus_constant (Pmode, tem,
1436 crtl->args.pretend_args_size),
1437 NULL_RTX);
1438 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1439
1440 size = GET_MODE_SIZE (Pmode);
1441
1442 /* Save the structure value address unless this is passed as an
1443 "invisible" first argument. */
1444 if (struct_incoming_value)
1445 {
1446 emit_move_insn (adjust_address (registers, Pmode, size),
1447 copy_to_reg (struct_incoming_value));
1448 size += GET_MODE_SIZE (Pmode);
1449 }
1450
1451 /* Return the address of the block. */
1452 return copy_addr_to_reg (XEXP (registers, 0));
1453 }
1454
1455 /* __builtin_apply_args returns block of memory allocated on
1456 the stack into which is stored the arg pointer, structure
1457 value address, static chain, and all the registers that might
1458 possibly be used in performing a function call. The code is
1459 moved to the start of the function so the incoming values are
1460 saved. */
1461
1462 static rtx
1463 expand_builtin_apply_args (void)
1464 {
1465 /* Don't do __builtin_apply_args more than once in a function.
1466 Save the result of the first call and reuse it. */
1467 if (apply_args_value != 0)
1468 return apply_args_value;
1469 {
1470 /* When this function is called, it means that registers must be
1471 saved on entry to this function. So we migrate the
1472 call to the first insn of this function. */
1473 rtx temp;
1474
1475 start_sequence ();
1476 temp = expand_builtin_apply_args_1 ();
1477 rtx_insn *seq = get_insns ();
1478 end_sequence ();
1479
1480 apply_args_value = temp;
1481
1482 /* Put the insns after the NOTE that starts the function.
1483 If this is inside a start_sequence, make the outer-level insn
1484 chain current, so the code is placed at the start of the
1485 function. If internal_arg_pointer is a non-virtual pseudo,
1486 it needs to be placed after the function that initializes
1487 that pseudo. */
1488 push_topmost_sequence ();
1489 if (REG_P (crtl->args.internal_arg_pointer)
1490 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1491 emit_insn_before (seq, parm_birth_insn);
1492 else
1493 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1494 pop_topmost_sequence ();
1495 return temp;
1496 }
1497 }
1498
1499 /* Perform an untyped call and save the state required to perform an
1500 untyped return of whatever value was returned by the given function. */
1501
1502 static rtx
1503 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1504 {
1505 int size, align, regno;
1506 machine_mode mode;
1507 rtx incoming_args, result, reg, dest, src;
1508 rtx_call_insn *call_insn;
1509 rtx old_stack_level = 0;
1510 rtx call_fusage = 0;
1511 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1512
1513 arguments = convert_memory_address (Pmode, arguments);
1514
1515 /* Create a block where the return registers can be saved. */
1516 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1517
1518 /* Fetch the arg pointer from the ARGUMENTS block. */
1519 incoming_args = gen_reg_rtx (Pmode);
1520 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1521 if (!STACK_GROWS_DOWNWARD)
1522 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1523 incoming_args, 0, OPTAB_LIB_WIDEN);
1524
1525 /* Push a new argument block and copy the arguments. Do not allow
1526 the (potential) memcpy call below to interfere with our stack
1527 manipulations. */
1528 do_pending_stack_adjust ();
1529 NO_DEFER_POP;
1530
1531 /* Save the stack with nonlocal if available. */
1532 if (targetm.have_save_stack_nonlocal ())
1533 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1534 else
1535 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1536
1537 /* Allocate a block of memory onto the stack and copy the memory
1538 arguments to the outgoing arguments address. We can pass TRUE
1539 as the 4th argument because we just saved the stack pointer
1540 and will restore it right after the call. */
1541 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1542
1543 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1544 may have already set current_function_calls_alloca to true.
1545 current_function_calls_alloca won't be set if argsize is zero,
1546 so we have to guarantee need_drap is true here. */
1547 if (SUPPORTS_STACK_ALIGNMENT)
1548 crtl->need_drap = true;
1549
1550 dest = virtual_outgoing_args_rtx;
1551 if (!STACK_GROWS_DOWNWARD)
1552 {
1553 if (CONST_INT_P (argsize))
1554 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1555 else
1556 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1557 }
1558 dest = gen_rtx_MEM (BLKmode, dest);
1559 set_mem_align (dest, PARM_BOUNDARY);
1560 src = gen_rtx_MEM (BLKmode, incoming_args);
1561 set_mem_align (src, PARM_BOUNDARY);
1562 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1563
1564 /* Refer to the argument block. */
1565 apply_args_size ();
1566 arguments = gen_rtx_MEM (BLKmode, arguments);
1567 set_mem_align (arguments, PARM_BOUNDARY);
1568
1569 /* Walk past the arg-pointer and structure value address. */
1570 size = GET_MODE_SIZE (Pmode);
1571 if (struct_value)
1572 size += GET_MODE_SIZE (Pmode);
1573
1574 /* Restore each of the registers previously saved. Make USE insns
1575 for each of these registers for use in making the call. */
1576 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1577 if ((mode = apply_args_mode[regno]) != VOIDmode)
1578 {
1579 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1580 if (size % align != 0)
1581 size = CEIL (size, align) * align;
1582 reg = gen_rtx_REG (mode, regno);
1583 emit_move_insn (reg, adjust_address (arguments, mode, size));
1584 use_reg (&call_fusage, reg);
1585 size += GET_MODE_SIZE (mode);
1586 }
1587
1588 /* Restore the structure value address unless this is passed as an
1589 "invisible" first argument. */
1590 size = GET_MODE_SIZE (Pmode);
1591 if (struct_value)
1592 {
1593 rtx value = gen_reg_rtx (Pmode);
1594 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1595 emit_move_insn (struct_value, value);
1596 if (REG_P (struct_value))
1597 use_reg (&call_fusage, struct_value);
1598 size += GET_MODE_SIZE (Pmode);
1599 }
1600
1601 /* All arguments and registers used for the call are set up by now! */
1602 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1603
1604 /* Ensure the address is valid. A SYMBOL_REF is already valid, so there is
1605 no need, and we don't want to load it into a register as an optimization,
1606 because prepare_call_address already did that if it should be done. */
1607 if (GET_CODE (function) != SYMBOL_REF)
1608 function = memory_address (FUNCTION_MODE, function);
1609
1610 /* Generate the actual call instruction and save the return value. */
1611 if (targetm.have_untyped_call ())
1612 {
1613 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1614 emit_call_insn (targetm.gen_untyped_call (mem, result,
1615 result_vector (1, result)));
1616 }
1617 else if (targetm.have_call_value ())
1618 {
1619 rtx valreg = 0;
1620
1621 /* Locate the unique return register. It is not possible to
1622 express a call that sets more than one return register using
1623 call_value; use untyped_call for that. In fact, untyped_call
1624 only needs to save the return registers in the given block. */
1625 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1626 if ((mode = apply_result_mode[regno]) != VOIDmode)
1627 {
1628 gcc_assert (!valreg); /* have_untyped_call required. */
1629
1630 valreg = gen_rtx_REG (mode, regno);
1631 }
1632
1633 emit_insn (targetm.gen_call_value (valreg,
1634 gen_rtx_MEM (FUNCTION_MODE, function),
1635 const0_rtx, NULL_RTX, const0_rtx));
1636
1637 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1638 }
1639 else
1640 gcc_unreachable ();
1641
1642 /* Find the CALL insn we just emitted, and attach the register usage
1643 information. */
1644 call_insn = last_call_insn ();
1645 add_function_usage_to (call_insn, call_fusage);
1646
1647 /* Restore the stack. */
1648 if (targetm.have_save_stack_nonlocal ())
1649 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1650 else
1651 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1652 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1653
1654 OK_DEFER_POP;
1655
1656 /* Return the address of the result block. */
1657 result = copy_addr_to_reg (XEXP (result, 0));
1658 return convert_memory_address (ptr_mode, result);
1659 }
1660
1661 /* Perform an untyped return. */
1662
1663 static void
1664 expand_builtin_return (rtx result)
1665 {
1666 int size, align, regno;
1667 machine_mode mode;
1668 rtx reg;
1669 rtx_insn *call_fusage = 0;
1670
1671 result = convert_memory_address (Pmode, result);
1672
1673 apply_result_size ();
1674 result = gen_rtx_MEM (BLKmode, result);
1675
1676 if (targetm.have_untyped_return ())
1677 {
1678 rtx vector = result_vector (0, result);
1679 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1680 emit_barrier ();
1681 return;
1682 }
1683
1684 /* Restore the return value and note that each value is used. */
1685 size = 0;
1686 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1687 if ((mode = apply_result_mode[regno]) != VOIDmode)
1688 {
1689 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1690 if (size % align != 0)
1691 size = CEIL (size, align) * align;
1692 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1693 emit_move_insn (reg, adjust_address (result, mode, size));
1694
1695 push_to_sequence (call_fusage);
1696 emit_use (reg);
1697 call_fusage = get_insns ();
1698 end_sequence ();
1699 size += GET_MODE_SIZE (mode);
1700 }
1701
1702 /* Put the USE insns before the return. */
1703 emit_insn (call_fusage);
1704
1705 /* Return whatever values were restored by jumping directly to the end
1706 of the function. */
1707 expand_naked_return ();
1708 }
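/* A source-level sketch of the idiom these expanders implement (the size
   argument below is just a conservative guess, not a target-exact value):

     double target_fn (double a, double b) { return a + b; }

     double forwarder (double a, double b)
     {
       void *args = __builtin_apply_args ();
       void *ret = __builtin_apply ((void (*) ()) target_fn, args, 64);
       __builtin_return (ret);
     }

   The apply expander above emits the untyped call and returns the address
   of the result block; expand_builtin_return then restores the saved return
   registers from such a block and returns.  */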
1709
1710 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1711
1712 static enum type_class
1713 type_to_class (tree type)
1714 {
1715 switch (TREE_CODE (type))
1716 {
1717 case VOID_TYPE: return void_type_class;
1718 case INTEGER_TYPE: return integer_type_class;
1719 case ENUMERAL_TYPE: return enumeral_type_class;
1720 case BOOLEAN_TYPE: return boolean_type_class;
1721 case POINTER_TYPE: return pointer_type_class;
1722 case REFERENCE_TYPE: return reference_type_class;
1723 case OFFSET_TYPE: return offset_type_class;
1724 case REAL_TYPE: return real_type_class;
1725 case COMPLEX_TYPE: return complex_type_class;
1726 case FUNCTION_TYPE: return function_type_class;
1727 case METHOD_TYPE: return method_type_class;
1728 case RECORD_TYPE: return record_type_class;
1729 case UNION_TYPE:
1730 case QUAL_UNION_TYPE: return union_type_class;
1731 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1732 ? string_type_class : array_type_class);
1733 case LANG_TYPE: return lang_type_class;
1734 default: return no_type_class;
1735 }
1736 }
1737
1738 /* Expand a call EXP to __builtin_classify_type. */
1739
1740 static rtx
1741 expand_builtin_classify_type (tree exp)
1742 {
1743 if (call_expr_nargs (exp))
1744 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1745 return GEN_INT (no_type_class);
1746 }
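/* For illustration, at the source level (class names only; the numeric
   values come from the type_class enum in typeclass.h):

     __builtin_classify_type (42)          -> integer_type_class
     __builtin_classify_type (3.14)        -> real_type_class
     __builtin_classify_type ((void *) 0)  -> pointer_type_class

   With no argument, no_type_class is returned.  */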
1747
1748 /* This helper macro, meant to be used in mathfn_built_in below,
1749 determines which among a set of three builtin math functions is
1750 appropriate for a given type mode. The `F' and `L' cases are
1751 automatically generated from the `double' case. */
1752 #define CASE_MATHFN(MATHFN) \
1753 CASE_CFN_##MATHFN: \
1754 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1755 fcodel = BUILT_IN_##MATHFN##L ; break;
1756 /* Similar to above, but appends _R after any F/L suffix. */
1757 #define CASE_MATHFN_REENT(MATHFN) \
1758 case CFN_BUILT_IN_##MATHFN##_R: \
1759 case CFN_BUILT_IN_##MATHFN##F_R: \
1760 case CFN_BUILT_IN_##MATHFN##L_R: \
1761 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1762 fcodel = BUILT_IN_##MATHFN##L_R ; break;
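/* For example, CASE_MATHFN (SIN) expands to

     CASE_CFN_SIN:
       fcode = BUILT_IN_SIN; fcodef = BUILT_IN_SINF;
       fcodel = BUILT_IN_SINL; break;

   where CASE_CFN_SIN (generated into case-cfn-macros.h) supplies the case
   labels for every combined_fn code that refers to sin.  */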
1763
1764 /* Return a function equivalent to FN but operating on floating-point
1765 values of type TYPE, or END_BUILTINS if no such function exists.
1766 This is purely an operation on function codes; it does not guarantee
1767 that the target actually has an implementation of the function. */
1768
1769 static built_in_function
1770 mathfn_built_in_2 (tree type, combined_fn fn)
1771 {
1772 built_in_function fcode, fcodef, fcodel;
1773
1774 switch (fn)
1775 {
1776 CASE_MATHFN (ACOS)
1777 CASE_MATHFN (ACOSH)
1778 CASE_MATHFN (ASIN)
1779 CASE_MATHFN (ASINH)
1780 CASE_MATHFN (ATAN)
1781 CASE_MATHFN (ATAN2)
1782 CASE_MATHFN (ATANH)
1783 CASE_MATHFN (CBRT)
1784 CASE_MATHFN (CEIL)
1785 CASE_MATHFN (CEXPI)
1786 CASE_MATHFN (COPYSIGN)
1787 CASE_MATHFN (COS)
1788 CASE_MATHFN (COSH)
1789 CASE_MATHFN (DREM)
1790 CASE_MATHFN (ERF)
1791 CASE_MATHFN (ERFC)
1792 CASE_MATHFN (EXP)
1793 CASE_MATHFN (EXP10)
1794 CASE_MATHFN (EXP2)
1795 CASE_MATHFN (EXPM1)
1796 CASE_MATHFN (FABS)
1797 CASE_MATHFN (FDIM)
1798 CASE_MATHFN (FLOOR)
1799 CASE_MATHFN (FMA)
1800 CASE_MATHFN (FMAX)
1801 CASE_MATHFN (FMIN)
1802 CASE_MATHFN (FMOD)
1803 CASE_MATHFN (FREXP)
1804 CASE_MATHFN (GAMMA)
1805 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
1806 CASE_MATHFN (HUGE_VAL)
1807 CASE_MATHFN (HYPOT)
1808 CASE_MATHFN (ILOGB)
1809 CASE_MATHFN (ICEIL)
1810 CASE_MATHFN (IFLOOR)
1811 CASE_MATHFN (INF)
1812 CASE_MATHFN (IRINT)
1813 CASE_MATHFN (IROUND)
1814 CASE_MATHFN (ISINF)
1815 CASE_MATHFN (J0)
1816 CASE_MATHFN (J1)
1817 CASE_MATHFN (JN)
1818 CASE_MATHFN (LCEIL)
1819 CASE_MATHFN (LDEXP)
1820 CASE_MATHFN (LFLOOR)
1821 CASE_MATHFN (LGAMMA)
1822 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
1823 CASE_MATHFN (LLCEIL)
1824 CASE_MATHFN (LLFLOOR)
1825 CASE_MATHFN (LLRINT)
1826 CASE_MATHFN (LLROUND)
1827 CASE_MATHFN (LOG)
1828 CASE_MATHFN (LOG10)
1829 CASE_MATHFN (LOG1P)
1830 CASE_MATHFN (LOG2)
1831 CASE_MATHFN (LOGB)
1832 CASE_MATHFN (LRINT)
1833 CASE_MATHFN (LROUND)
1834 CASE_MATHFN (MODF)
1835 CASE_MATHFN (NAN)
1836 CASE_MATHFN (NANS)
1837 CASE_MATHFN (NEARBYINT)
1838 CASE_MATHFN (NEXTAFTER)
1839 CASE_MATHFN (NEXTTOWARD)
1840 CASE_MATHFN (POW)
1841 CASE_MATHFN (POWI)
1842 CASE_MATHFN (POW10)
1843 CASE_MATHFN (REMAINDER)
1844 CASE_MATHFN (REMQUO)
1845 CASE_MATHFN (RINT)
1846 CASE_MATHFN (ROUND)
1847 CASE_MATHFN (SCALB)
1848 CASE_MATHFN (SCALBLN)
1849 CASE_MATHFN (SCALBN)
1850 CASE_MATHFN (SIGNBIT)
1851 CASE_MATHFN (SIGNIFICAND)
1852 CASE_MATHFN (SIN)
1853 CASE_MATHFN (SINCOS)
1854 CASE_MATHFN (SINH)
1855 CASE_MATHFN (SQRT)
1856 CASE_MATHFN (TAN)
1857 CASE_MATHFN (TANH)
1858 CASE_MATHFN (TGAMMA)
1859 CASE_MATHFN (TRUNC)
1860 CASE_MATHFN (Y0)
1861 CASE_MATHFN (Y1)
1862 CASE_MATHFN (YN)
1863
1864 default:
1865 return END_BUILTINS;
1866 }
1867
1868 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1869 return fcode;
1870 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1871 return fcodef;
1872 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1873 return fcodel;
1874 else
1875 return END_BUILTINS;
1876 }
1877
1878 /* Return a mathematical function equivalent to FN but operating directly on TYPE,
1879 if available. If IMPLICIT_P is true use the implicit builtin declaration,
1880 otherwise use the explicit declaration. If we can't do the conversion,
1881 return null. */
1882
1883 static tree
1884 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
1885 {
1886 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
1887 if (fcode2 == END_BUILTINS)
1888 return NULL_TREE;
1889
1890 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1891 return NULL_TREE;
1892
1893 return builtin_decl_explicit (fcode2);
1894 }
1895
1896 /* Like mathfn_built_in_1, but always use the implicit array. */
1897
1898 tree
1899 mathfn_built_in (tree type, combined_fn fn)
1900 {
1901 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1902 }
1903
1904 /* Like mathfn_built_in_1, but take a built_in_function and
1905 always use the implicit array. */
1906
1907 tree
1908 mathfn_built_in (tree type, enum built_in_function fn)
1909 {
1910 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
1911 }
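/* A typical use, sketched: pick the math builtin matching an argument's
   type, e.g.

     tree fn = mathfn_built_in (long_double_type_node, BUILT_IN_SIN);

   returns the declaration of sinl if it is implicitly available, and
   NULL_TREE otherwise.  */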
1912
1913 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
1914 return its code, otherwise return IFN_LAST. Note that this function
1915 only tests whether the function is defined in internal-fn.def, not whether
1916 it is actually available on the target. */
1917
1918 internal_fn
1919 associated_internal_fn (tree fndecl)
1920 {
1921 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
1922 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
1923 switch (DECL_FUNCTION_CODE (fndecl))
1924 {
1925 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
1926 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
1927 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
1928 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
1929 #include "internal-fn.def"
1930
1931 CASE_FLT_FN (BUILT_IN_POW10):
1932 return IFN_EXP10;
1933
1934 CASE_FLT_FN (BUILT_IN_DREM):
1935 return IFN_REMAINDER;
1936
1937 CASE_FLT_FN (BUILT_IN_SCALBN):
1938 CASE_FLT_FN (BUILT_IN_SCALBLN):
1939 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
1940 return IFN_LDEXP;
1941 return IFN_LAST;
1942
1943 default:
1944 return IFN_LAST;
1945 }
1946 }
1947
1948 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
1949 on the current target by a call to an internal function, return the
1950 code of that internal function, otherwise return IFN_LAST. The caller
1951 is responsible for ensuring that any side-effects of the built-in
1952 call are dealt with correctly. E.g. if CALL sets errno, the caller
1953 must decide that the errno result isn't needed or make it available
1954 in some other way. */
1955
1956 internal_fn
1957 replacement_internal_fn (gcall *call)
1958 {
1959 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
1960 {
1961 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
1962 if (ifn != IFN_LAST)
1963 {
1964 tree_pair types = direct_internal_fn_types (ifn, call);
1965 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
1966 if (direct_internal_fn_supported_p (ifn, types, opt_type))
1967 return ifn;
1968 }
1969 }
1970 return IFN_LAST;
1971 }
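/* For instance, given a GIMPLE call  t_5 = __builtin_sqrtf (x_3);
   replacement_internal_fn returns IFN_SQRT when the target supports a
   direct square-root pattern for the call's mode under the containing
   block's optimization type, and IFN_LAST otherwise, in which case the
   library call is kept.  As noted above, the caller still has to deal
   with errno.  */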
1972
1973 /* Expand a call to the builtin ternary math functions (fma).
1974 Return NULL_RTX if a normal call should be emitted rather than expanding the
1975 function in-line. EXP is the expression that is a call to the builtin
1976 function; if convenient, the result should be placed in TARGET.
1977 SUBTARGET may be used as the target for computing one of EXP's
1978 operands. */
1979
1980 static rtx
1981 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
1982 {
1983 optab builtin_optab;
1984 rtx op0, op1, op2, result;
1985 rtx_insn *insns;
1986 tree fndecl = get_callee_fndecl (exp);
1987 tree arg0, arg1, arg2;
1988 machine_mode mode;
1989
1990 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
1991 return NULL_RTX;
1992
1993 arg0 = CALL_EXPR_ARG (exp, 0);
1994 arg1 = CALL_EXPR_ARG (exp, 1);
1995 arg2 = CALL_EXPR_ARG (exp, 2);
1996
1997 switch (DECL_FUNCTION_CODE (fndecl))
1998 {
1999 CASE_FLT_FN (BUILT_IN_FMA):
2000 builtin_optab = fma_optab; break;
2001 default:
2002 gcc_unreachable ();
2003 }
2004
2005 /* Make a suitable register to place result in. */
2006 mode = TYPE_MODE (TREE_TYPE (exp));
2007
2008 /* Before working hard, check whether the instruction is available. */
2009 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2010 return NULL_RTX;
2011
2012 result = gen_reg_rtx (mode);
2013
2014 /* Always stabilize the argument list. */
2015 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2016 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2017 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2018
2019 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2020 op1 = expand_normal (arg1);
2021 op2 = expand_normal (arg2);
2022
2023 start_sequence ();
2024
2025 /* Compute into RESULT.
2026 Set RESULT to wherever the result comes back. */
2027 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2028 result, 0);
2029
2030 /* If we were unable to expand via the builtin, stop the sequence
2031 (without outputting the insns) and call the library function
2032 with the stabilized argument list. */
2033 if (result == 0)
2034 {
2035 end_sequence ();
2036 return expand_call (exp, target, target == const0_rtx);
2037 }
2038
2039 /* Output the entire sequence. */
2040 insns = get_insns ();
2041 end_sequence ();
2042 emit_insn (insns);
2043
2044 return result;
2045 }
2046
2047 /* Expand a call to the builtin sin and cos math functions.
2048 Return NULL_RTX if a normal call should be emitted rather than expanding the
2049 function in-line. EXP is the expression that is a call to the builtin
2050 function; if convenient, the result should be placed in TARGET.
2051 SUBTARGET may be used as the target for computing one of EXP's
2052 operands. */
2053
2054 static rtx
2055 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2056 {
2057 optab builtin_optab;
2058 rtx op0;
2059 rtx_insn *insns;
2060 tree fndecl = get_callee_fndecl (exp);
2061 machine_mode mode;
2062 tree arg;
2063
2064 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2065 return NULL_RTX;
2066
2067 arg = CALL_EXPR_ARG (exp, 0);
2068
2069 switch (DECL_FUNCTION_CODE (fndecl))
2070 {
2071 CASE_FLT_FN (BUILT_IN_SIN):
2072 CASE_FLT_FN (BUILT_IN_COS):
2073 builtin_optab = sincos_optab; break;
2074 default:
2075 gcc_unreachable ();
2076 }
2077
2078 /* Make a suitable register to place result in. */
2079 mode = TYPE_MODE (TREE_TYPE (exp));
2080
2081 /* Check if the sincos insn is available; otherwise fall back
2082 to the sin or cos insn. */
2083 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2084 switch (DECL_FUNCTION_CODE (fndecl))
2085 {
2086 CASE_FLT_FN (BUILT_IN_SIN):
2087 builtin_optab = sin_optab; break;
2088 CASE_FLT_FN (BUILT_IN_COS):
2089 builtin_optab = cos_optab; break;
2090 default:
2091 gcc_unreachable ();
2092 }
2093
2094 /* Before working hard, check whether the instruction is available. */
2095 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2096 {
2097 rtx result = gen_reg_rtx (mode);
2098
2099 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2100 need to expand the argument again. This way, we will not perform
2101 side-effects more than once. */
2102 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2103
2104 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2105
2106 start_sequence ();
2107
2108 /* Compute into RESULT.
2109 Set RESULT to wherever the result comes back. */
2110 if (builtin_optab == sincos_optab)
2111 {
2112 int ok;
2113
2114 switch (DECL_FUNCTION_CODE (fndecl))
2115 {
2116 CASE_FLT_FN (BUILT_IN_SIN):
2117 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2118 break;
2119 CASE_FLT_FN (BUILT_IN_COS):
2120 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2121 break;
2122 default:
2123 gcc_unreachable ();
2124 }
2125 gcc_assert (ok);
2126 }
2127 else
2128 result = expand_unop (mode, builtin_optab, op0, result, 0);
2129
2130 if (result != 0)
2131 {
2132 /* Output the entire sequence. */
2133 insns = get_insns ();
2134 end_sequence ();
2135 emit_insn (insns);
2136 return result;
2137 }
2138
2139 /* If we were unable to expand via the builtin, stop the sequence
2140 (without outputting the insns) and call the library function
2141 with the stabilized argument list. */
2142 end_sequence ();
2143 }
2144
2145 return expand_call (exp, target, target == const0_rtx);
2146 }
2147
2148 /* Given an interclass math builtin decl FNDECL and its argument ARG
2149 return an RTL instruction code that implements the functionality.
2150 If that isn't possible or available return CODE_FOR_nothing. */
2151
2152 static enum insn_code
2153 interclass_mathfn_icode (tree arg, tree fndecl)
2154 {
2155 bool errno_set = false;
2156 optab builtin_optab = unknown_optab;
2157 machine_mode mode;
2158
2159 switch (DECL_FUNCTION_CODE (fndecl))
2160 {
2161 CASE_FLT_FN (BUILT_IN_ILOGB):
2162 errno_set = true; builtin_optab = ilogb_optab; break;
2163 CASE_FLT_FN (BUILT_IN_ISINF):
2164 builtin_optab = isinf_optab; break;
2165 case BUILT_IN_ISNORMAL:
2166 case BUILT_IN_ISFINITE:
2167 CASE_FLT_FN (BUILT_IN_FINITE):
2168 case BUILT_IN_FINITED32:
2169 case BUILT_IN_FINITED64:
2170 case BUILT_IN_FINITED128:
2171 case BUILT_IN_ISINFD32:
2172 case BUILT_IN_ISINFD64:
2173 case BUILT_IN_ISINFD128:
2174 /* These builtins have no optabs (yet). */
2175 break;
2176 default:
2177 gcc_unreachable ();
2178 }
2179
2180 /* There's no easy way to detect the case we need to set EDOM. */
2181 if (flag_errno_math && errno_set)
2182 return CODE_FOR_nothing;
2183
2184 /* Optab mode depends on the mode of the input argument. */
2185 mode = TYPE_MODE (TREE_TYPE (arg));
2186
2187 if (builtin_optab)
2188 return optab_handler (builtin_optab, mode);
2189 return CODE_FOR_nothing;
2190 }
2191
2192 /* Expand a call to one of the builtin math functions that operate on
2193 a floating point argument and output an integer result (ilogb, isinf,
2194 isnan, etc).
2195 Return 0 if a normal call should be emitted rather than expanding the
2196 function in-line. EXP is the expression that is a call to the builtin
2197 function; if convenient, the result should be placed in TARGET. */
2198
2199 static rtx
2200 expand_builtin_interclass_mathfn (tree exp, rtx target)
2201 {
2202 enum insn_code icode = CODE_FOR_nothing;
2203 rtx op0;
2204 tree fndecl = get_callee_fndecl (exp);
2205 machine_mode mode;
2206 tree arg;
2207
2208 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2209 return NULL_RTX;
2210
2211 arg = CALL_EXPR_ARG (exp, 0);
2212 icode = interclass_mathfn_icode (arg, fndecl);
2213 mode = TYPE_MODE (TREE_TYPE (arg));
2214
2215 if (icode != CODE_FOR_nothing)
2216 {
2217 struct expand_operand ops[1];
2218 rtx_insn *last = get_last_insn ();
2219 tree orig_arg = arg;
2220
2221 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2222 need to expand the argument again. This way, we will not perform
2223 side-effects more than once. */
2224 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2225
2226 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2227
2228 if (mode != GET_MODE (op0))
2229 op0 = convert_to_mode (mode, op0, 0);
2230
2231 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2232 if (maybe_legitimize_operands (icode, 0, 1, ops)
2233 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2234 return ops[0].value;
2235
2236 delete_insns_since (last);
2237 CALL_EXPR_ARG (exp, 0) = orig_arg;
2238 }
2239
2240 return NULL_RTX;
2241 }
2242
2243 /* Expand a call to the builtin sincos math function.
2244 Return NULL_RTX if a normal call should be emitted rather than expanding the
2245 function in-line. EXP is the expression that is a call to the builtin
2246 function. */
2247
2248 static rtx
2249 expand_builtin_sincos (tree exp)
2250 {
2251 rtx op0, op1, op2, target1, target2;
2252 machine_mode mode;
2253 tree arg, sinp, cosp;
2254 int result;
2255 location_t loc = EXPR_LOCATION (exp);
2256 tree alias_type, alias_off;
2257
2258 if (!validate_arglist (exp, REAL_TYPE,
2259 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2260 return NULL_RTX;
2261
2262 arg = CALL_EXPR_ARG (exp, 0);
2263 sinp = CALL_EXPR_ARG (exp, 1);
2264 cosp = CALL_EXPR_ARG (exp, 2);
2265
2266 /* Make a suitable register to place result in. */
2267 mode = TYPE_MODE (TREE_TYPE (arg));
2268
2269 /* Check if sincos insn is available, otherwise emit the call. */
2270 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2271 return NULL_RTX;
2272
2273 target1 = gen_reg_rtx (mode);
2274 target2 = gen_reg_rtx (mode);
2275
2276 op0 = expand_normal (arg);
2277 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2278 alias_off = build_int_cst (alias_type, 0);
2279 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2280 sinp, alias_off));
2281 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2282 cosp, alias_off));
2283
2284 /* Compute into target1 and target2.
2285 Set TARGET to wherever the result comes back. */
2286 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2287 gcc_assert (result);
2288
2289 /* Move target1 and target2 to the memory locations indicated
2290 by op1 and op2. */
2291 emit_move_insn (op1, target1);
2292 emit_move_insn (op2, target2);
2293
2294 return const0_rtx;
2295 }
2296
2297 /* Expand a call to the internal cexpi builtin in terms of the sincos math function.
2298 EXP is the expression that is a call to the builtin function; if convenient,
2299 the result should be placed in TARGET. */
2300
2301 static rtx
2302 expand_builtin_cexpi (tree exp, rtx target)
2303 {
2304 tree fndecl = get_callee_fndecl (exp);
2305 tree arg, type;
2306 machine_mode mode;
2307 rtx op0, op1, op2;
2308 location_t loc = EXPR_LOCATION (exp);
2309
2310 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2311 return NULL_RTX;
2312
2313 arg = CALL_EXPR_ARG (exp, 0);
2314 type = TREE_TYPE (arg);
2315 mode = TYPE_MODE (TREE_TYPE (arg));
2316
2317 /* Try expanding via a sincos optab; fall back to emitting a libcall
2318 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2319 is only generated from sincos or cexp, or if we have either of them. */
2320 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2321 {
2322 op1 = gen_reg_rtx (mode);
2323 op2 = gen_reg_rtx (mode);
2324
2325 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2326
2327 /* Compute into op1 and op2. */
2328 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2329 }
2330 else if (targetm.libc_has_function (function_sincos))
2331 {
2332 tree call, fn = NULL_TREE;
2333 tree top1, top2;
2334 rtx op1a, op2a;
2335
2336 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2337 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2338 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2339 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2340 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2341 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2342 else
2343 gcc_unreachable ();
2344
2345 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2346 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2347 op1a = copy_addr_to_reg (XEXP (op1, 0));
2348 op2a = copy_addr_to_reg (XEXP (op2, 0));
2349 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2350 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2351
2352 /* Make sure not to fold the sincos call again. */
2353 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2354 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2355 call, 3, arg, top1, top2));
2356 }
2357 else
2358 {
2359 tree call, fn = NULL_TREE, narg;
2360 tree ctype = build_complex_type (type);
2361
2362 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2363 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2364 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2365 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2366 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2367 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2368 else
2369 gcc_unreachable ();
2370
2371 /* If we don't have a decl for cexp, create one. This is the
2372 friendliest fallback if the user calls __builtin_cexpi
2373 without full C99 function support on the target. */
2374 if (fn == NULL_TREE)
2375 {
2376 tree fntype;
2377 const char *name = NULL;
2378
2379 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2380 name = "cexpf";
2381 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2382 name = "cexp";
2383 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2384 name = "cexpl";
2385
2386 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2387 fn = build_fn_decl (name, fntype);
2388 }
2389
2390 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2391 build_real (type, dconst0), arg);
2392
2393 /* Make sure not to fold the cexp call again. */
2394 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2395 return expand_expr (build_call_nary (ctype, call, 1, narg),
2396 target, VOIDmode, EXPAND_NORMAL);
2397 }
2398
2399 /* Now build the proper return type. */
2400 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2401 make_tree (TREE_TYPE (arg), op2),
2402 make_tree (TREE_TYPE (arg), op1)),
2403 target, VOIDmode, EXPAND_NORMAL);
2404 }
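/* In other words, __builtin_cexpi (x) computes cexp (I * x), i.e.
   cos (x) + I * sin (x).  Sketch of the three strategies above for double
   (libcall shapes are illustrative):

     1. sincos optab:    the pattern fills op1 = sin (x), op2 = cos (x)
     2. sincos libcall:  sincos (x, &s, &c);  result = c + I * s
     3. cexp libcall:    result = cexp (0.0 + I * x)  */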
2405
2406 /* Conveniently construct a function call expression. FNDECL names the
2407 function to be called, N is the number of arguments, and the "..."
2408 parameters are the argument expressions. Unlike build_call_expr,
2409 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2410
2411 static tree
2412 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2413 {
2414 va_list ap;
2415 tree fntype = TREE_TYPE (fndecl);
2416 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2417
2418 va_start (ap, n);
2419 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2420 va_end (ap);
2421 SET_EXPR_LOCATION (fn, loc);
2422 return fn;
2423 }
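/* Typical use, as later in this file:

     exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl,
                                  1, arg);

   which builds a plain CALL_EXPR to FALLBACK_FNDECL with the single
   argument ARG, without running the usual call folding.  */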
2424
2425 /* Expand a call to one of the builtin rounding functions gcc defines
2426 as an extension (lfloor and lceil). As these are gcc extensions, we
2427 do not need to worry about setting errno to EDOM.
2428 If expanding via the optab fails, lower the expression to (int)(floor(x)).
2429 EXP is the expression that is a call to the builtin function;
2430 if convenient, the result should be placed in TARGET. */
2431
2432 static rtx
2433 expand_builtin_int_roundingfn (tree exp, rtx target)
2434 {
2435 convert_optab builtin_optab;
2436 rtx op0, tmp;
2437 rtx_insn *insns;
2438 tree fndecl = get_callee_fndecl (exp);
2439 enum built_in_function fallback_fn;
2440 tree fallback_fndecl;
2441 machine_mode mode;
2442 tree arg;
2443
2444 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2445 gcc_unreachable ();
2446
2447 arg = CALL_EXPR_ARG (exp, 0);
2448
2449 switch (DECL_FUNCTION_CODE (fndecl))
2450 {
2451 CASE_FLT_FN (BUILT_IN_ICEIL):
2452 CASE_FLT_FN (BUILT_IN_LCEIL):
2453 CASE_FLT_FN (BUILT_IN_LLCEIL):
2454 builtin_optab = lceil_optab;
2455 fallback_fn = BUILT_IN_CEIL;
2456 break;
2457
2458 CASE_FLT_FN (BUILT_IN_IFLOOR):
2459 CASE_FLT_FN (BUILT_IN_LFLOOR):
2460 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2461 builtin_optab = lfloor_optab;
2462 fallback_fn = BUILT_IN_FLOOR;
2463 break;
2464
2465 default:
2466 gcc_unreachable ();
2467 }
2468
2469 /* Make a suitable register to place result in. */
2470 mode = TYPE_MODE (TREE_TYPE (exp));
2471
2472 target = gen_reg_rtx (mode);
2473
2474 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2475 need to expand the argument again. This way, we will not perform
2476 side-effects more than once. */
2477 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2478
2479 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2480
2481 start_sequence ();
2482
2483 /* Compute into TARGET. */
2484 if (expand_sfix_optab (target, op0, builtin_optab))
2485 {
2486 /* Output the entire sequence. */
2487 insns = get_insns ();
2488 end_sequence ();
2489 emit_insn (insns);
2490 return target;
2491 }
2492
2493 /* If we were unable to expand via the builtin, stop the sequence
2494 (without outputting the insns). */
2495 end_sequence ();
2496
2497 /* Fall back to floating point rounding optab. */
2498 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2499
2500 /* For non-C99 targets we may end up without a fallback fndecl here
2501 if the user called __builtin_lfloor directly. In this case emit
2502 a call to the floor/ceil variants nevertheless. This should give
2503 the best user experience on targets without full C99 support. */
2504 if (fallback_fndecl == NULL_TREE)
2505 {
2506 tree fntype;
2507 const char *name = NULL;
2508
2509 switch (DECL_FUNCTION_CODE (fndecl))
2510 {
2511 case BUILT_IN_ICEIL:
2512 case BUILT_IN_LCEIL:
2513 case BUILT_IN_LLCEIL:
2514 name = "ceil";
2515 break;
2516 case BUILT_IN_ICEILF:
2517 case BUILT_IN_LCEILF:
2518 case BUILT_IN_LLCEILF:
2519 name = "ceilf";
2520 break;
2521 case BUILT_IN_ICEILL:
2522 case BUILT_IN_LCEILL:
2523 case BUILT_IN_LLCEILL:
2524 name = "ceill";
2525 break;
2526 case BUILT_IN_IFLOOR:
2527 case BUILT_IN_LFLOOR:
2528 case BUILT_IN_LLFLOOR:
2529 name = "floor";
2530 break;
2531 case BUILT_IN_IFLOORF:
2532 case BUILT_IN_LFLOORF:
2533 case BUILT_IN_LLFLOORF:
2534 name = "floorf";
2535 break;
2536 case BUILT_IN_IFLOORL:
2537 case BUILT_IN_LFLOORL:
2538 case BUILT_IN_LLFLOORL:
2539 name = "floorl";
2540 break;
2541 default:
2542 gcc_unreachable ();
2543 }
2544
2545 fntype = build_function_type_list (TREE_TYPE (arg),
2546 TREE_TYPE (arg), NULL_TREE);
2547 fallback_fndecl = build_fn_decl (name, fntype);
2548 }
2549
2550 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2551
2552 tmp = expand_normal (exp);
2553 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2554
2555 /* Truncate the result of floating point optab to integer
2556 via expand_fix (). */
2557 target = gen_reg_rtx (mode);
2558 expand_fix (target, tmp, 0);
2559
2560 return target;
2561 }
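/* As a sketch, a call such as

     long l = __builtin_lfloor (x);     // double x

   is expanded through the lfloor optab when the target provides it and is
   otherwise lowered to roughly

     double tmp = floor (x);            // library or builtin call
     long l = (long) tmp;               // expand_fix

   as described in the comment before the function.  */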
2562
2563 /* Expand a call to one of the builtin math functions doing integer
2564 conversion (lrint).
2565 Return 0 if a normal call should be emitted rather than expanding the
2566 function in-line. EXP is the expression that is a call to the builtin
2567 function; if convenient, the result should be placed in TARGET. */
2568
2569 static rtx
2570 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2571 {
2572 convert_optab builtin_optab;
2573 rtx op0;
2574 rtx_insn *insns;
2575 tree fndecl = get_callee_fndecl (exp);
2576 tree arg;
2577 machine_mode mode;
2578 enum built_in_function fallback_fn = BUILT_IN_NONE;
2579
2580 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2581 gcc_unreachable ();
2582
2583 arg = CALL_EXPR_ARG (exp, 0);
2584
2585 switch (DECL_FUNCTION_CODE (fndecl))
2586 {
2587 CASE_FLT_FN (BUILT_IN_IRINT):
2588 fallback_fn = BUILT_IN_LRINT;
2589 /* FALLTHRU */
2590 CASE_FLT_FN (BUILT_IN_LRINT):
2591 CASE_FLT_FN (BUILT_IN_LLRINT):
2592 builtin_optab = lrint_optab;
2593 break;
2594
2595 CASE_FLT_FN (BUILT_IN_IROUND):
2596 fallback_fn = BUILT_IN_LROUND;
2597 /* FALLTHRU */
2598 CASE_FLT_FN (BUILT_IN_LROUND):
2599 CASE_FLT_FN (BUILT_IN_LLROUND):
2600 builtin_optab = lround_optab;
2601 break;
2602
2603 default:
2604 gcc_unreachable ();
2605 }
2606
2607 /* There's no easy way to detect the case we need to set EDOM. */
2608 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2609 return NULL_RTX;
2610
2611 /* Make a suitable register to place result in. */
2612 mode = TYPE_MODE (TREE_TYPE (exp));
2613
2614 /* There's no easy way to detect the case we need to set EDOM. */
2615 if (!flag_errno_math)
2616 {
2617 rtx result = gen_reg_rtx (mode);
2618
2619 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2620 need to expand the argument again. This way, we will not perform
2621 side-effects more than once. */
2622 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2623
2624 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2625
2626 start_sequence ();
2627
2628 if (expand_sfix_optab (result, op0, builtin_optab))
2629 {
2630 /* Output the entire sequence. */
2631 insns = get_insns ();
2632 end_sequence ();
2633 emit_insn (insns);
2634 return result;
2635 }
2636
2637 /* If we were unable to expand via the builtin, stop the sequence
2638 (without outputting the insns) and call the library function
2639 with the stabilized argument list. */
2640 end_sequence ();
2641 }
2642
2643 if (fallback_fn != BUILT_IN_NONE)
2644 {
2645 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2646 targets, (int) round (x) should never be transformed into
2647 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2648 a call to lround in the hope that the target provides at least some
2649 C99 functions. This should give the best user experience on targets
2650 without full C99 support. */
2651 tree fallback_fndecl = mathfn_built_in_1
2652 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
2653
2654 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2655 fallback_fndecl, 1, arg);
2656
2657 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2658 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2659 return convert_to_mode (mode, target, 0);
2660 }
2661
2662 return expand_call (exp, target, target == const0_rtx);
2663 }
2664
2665 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2666 a normal call should be emitted rather than expanding the function
2667 in-line. EXP is the expression that is a call to the builtin
2668 function; if convenient, the result should be placed in TARGET. */
2669
2670 static rtx
2671 expand_builtin_powi (tree exp, rtx target)
2672 {
2673 tree arg0, arg1;
2674 rtx op0, op1;
2675 machine_mode mode;
2676 machine_mode mode2;
2677
2678 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2679 return NULL_RTX;
2680
2681 arg0 = CALL_EXPR_ARG (exp, 0);
2682 arg1 = CALL_EXPR_ARG (exp, 1);
2683 mode = TYPE_MODE (TREE_TYPE (exp));
2684
2685 /* Emit a libcall to libgcc. */
2686
2687 /* Mode of the 2nd argument must match that of an int. */
2688 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2689
2690 if (target == NULL_RTX)
2691 target = gen_reg_rtx (mode);
2692
2693 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2694 if (GET_MODE (op0) != mode)
2695 op0 = convert_to_mode (mode, op0, 0);
2696 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2697 if (GET_MODE (op1) != mode2)
2698 op1 = convert_to_mode (mode2, op1, 0);
2699
2700 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2701 target, LCT_CONST, mode, 2,
2702 op0, mode, op1, mode2);
2703
2704 return target;
2705 }
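/* For example, assuming the default libgcc libfunc names,

     double y = __builtin_powi (x, n);  // double x; int n

   becomes a libcall along the lines of  y = __powidf2 (x, n);  */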
2706
2707 /* Expand expression EXP, which is a call to the strlen builtin. Return
2708 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
2709 try to get the result in TARGET, if convenient. */
2710
2711 static rtx
2712 expand_builtin_strlen (tree exp, rtx target,
2713 machine_mode target_mode)
2714 {
2715 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2716 return NULL_RTX;
2717 else
2718 {
2719 struct expand_operand ops[4];
2720 rtx pat;
2721 tree len;
2722 tree src = CALL_EXPR_ARG (exp, 0);
2723 rtx src_reg;
2724 rtx_insn *before_strlen;
2725 machine_mode insn_mode = target_mode;
2726 enum insn_code icode = CODE_FOR_nothing;
2727 unsigned int align;
2728
2729 /* If the length can be computed at compile-time, return it. */
2730 len = c_strlen (src, 0);
2731 if (len)
2732 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2733
2734 /* If the length can be computed at compile-time and is a constant
2735 integer, but there are side-effects in src, evaluate
2736 src for side-effects, then return len.
2737 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2738 can be optimized into: i++; x = 3; */
2739 len = c_strlen (src, 1);
2740 if (len && TREE_CODE (len) == INTEGER_CST)
2741 {
2742 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2743 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2744 }
2745
2746 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2747
2748 /* If SRC is not a pointer type, don't do this operation inline. */
2749 if (align == 0)
2750 return NULL_RTX;
2751
2752 /* Bail out if we can't compute strlen in the right mode. */
2753 while (insn_mode != VOIDmode)
2754 {
2755 icode = optab_handler (strlen_optab, insn_mode);
2756 if (icode != CODE_FOR_nothing)
2757 break;
2758
2759 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
2760 }
2761 if (insn_mode == VOIDmode)
2762 return NULL_RTX;
2763
2764 /* Make a place to hold the source address. We will not expand
2765 the actual source until we are sure that the expansion will
2766 not fail -- there are trees that cannot be expanded twice. */
2767 src_reg = gen_reg_rtx (Pmode);
2768
2769 /* Mark the beginning of the strlen sequence so we can emit the
2770 source operand later. */
2771 before_strlen = get_last_insn ();
2772
2773 create_output_operand (&ops[0], target, insn_mode);
2774 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
2775 create_integer_operand (&ops[2], 0);
2776 create_integer_operand (&ops[3], align);
2777 if (!maybe_expand_insn (icode, 4, ops))
2778 return NULL_RTX;
2779
2780 /* Now that we are assured of success, expand the source. */
2781 start_sequence ();
2782 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
2783 if (pat != src_reg)
2784 {
2785 #ifdef POINTERS_EXTEND_UNSIGNED
2786 if (GET_MODE (pat) != Pmode)
2787 pat = convert_to_mode (Pmode, pat,
2788 POINTERS_EXTEND_UNSIGNED);
2789 #endif
2790 emit_move_insn (src_reg, pat);
2791 }
2792 pat = get_insns ();
2793 end_sequence ();
2794
2795 if (before_strlen)
2796 emit_insn_after (pat, before_strlen);
2797 else
2798 emit_insn_before (pat, get_insns ());
2799
2800 /* Return the value in the proper mode for this function. */
2801 if (GET_MODE (ops[0].value) == target_mode)
2802 target = ops[0].value;
2803 else if (target != 0)
2804 convert_move (target, ops[0].value, 0);
2805 else
2806 target = convert_to_mode (target_mode, ops[0].value, 0);
2807
2808 return target;
2809 }
2810 }
2811
2812 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
2813 bytes from constant string DATA + OFFSET and return it as target
2814 constant. */
2815
2816 static rtx
2817 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
2818 machine_mode mode)
2819 {
2820 const char *str = (const char *) data;
2821
2822 gcc_assert (offset >= 0
2823 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
2824 <= strlen (str) + 1));
2825
2826 return c_readstr (str + offset, mode);
2827 }
2828
2829 /* LEN specifies the length of the block for the memcpy/memset operation.
2830 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
2831 In some cases we can make a very likely guess at the maximum size, which
2832 we then store in PROBABLE_MAX_SIZE. */
2833
2834 static void
2835 determine_block_size (tree len, rtx len_rtx,
2836 unsigned HOST_WIDE_INT *min_size,
2837 unsigned HOST_WIDE_INT *max_size,
2838 unsigned HOST_WIDE_INT *probable_max_size)
2839 {
2840 if (CONST_INT_P (len_rtx))
2841 {
2842 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
2843 return;
2844 }
2845 else
2846 {
2847 wide_int min, max;
2848 enum value_range_type range_type = VR_UNDEFINED;
2849
2850 /* Determine bounds from the type. */
2851 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
2852 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
2853 else
2854 *min_size = 0;
2855 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
2856 *probable_max_size = *max_size
2857 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
2858 else
2859 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
2860
2861 if (TREE_CODE (len) == SSA_NAME)
2862 range_type = get_range_info (len, &min, &max);
2863 if (range_type == VR_RANGE)
2864 {
2865 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
2866 *min_size = min.to_uhwi ();
2867 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
2868 *probable_max_size = *max_size = max.to_uhwi ();
2869 }
2870 else if (range_type == VR_ANTI_RANGE)
2871 {
2872 /* An anti range 0...N lets us determine the minimal size to be N+1. */
2873 if (min == 0)
2874 {
2875 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
2876 *min_size = max.to_uhwi () + 1;
2877 }
2878 /* Code like
2879
2880 int n;
2881 if (n < 100)
2882 memcpy (a, b, n)
2883
2884 produces an anti range allowing negative values of N. We can
2885 still use the information and guess that N is not negative.
2886 */
2887 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
2888 *probable_max_size = min.to_uhwi () - 1;
2889 }
2890 }
2891 gcc_checking_assert (*max_size <=
2892 (unsigned HOST_WIDE_INT)
2893 GET_MODE_MASK (GET_MODE (len_rtx)));
2894 }
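/* A sketch of the common case, assuming value ranges are available:

     void f (unsigned int n, char *a, char *b)
     {
       if (n < 100)
         memcpy (a, b, n);
     }

   gives N the range [0, 99], so *MIN_SIZE = 0 and
   *MAX_SIZE = *PROBABLE_MAX_SIZE = 99.  With the signed-N anti range shown
   in the comment above, only *PROBABLE_MAX_SIZE is tightened.  */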
2895
2896 /* Helper function to do the actual work for expand_builtin_memcpy. */
2897
2898 static rtx
2899 expand_builtin_memcpy_args (tree dest, tree src, tree len, rtx target, tree exp)
2900 {
2901 const char *src_str;
2902 unsigned int src_align = get_pointer_alignment (src);
2903 unsigned int dest_align = get_pointer_alignment (dest);
2904 rtx dest_mem, src_mem, dest_addr, len_rtx;
2905 HOST_WIDE_INT expected_size = -1;
2906 unsigned int expected_align = 0;
2907 unsigned HOST_WIDE_INT min_size;
2908 unsigned HOST_WIDE_INT max_size;
2909 unsigned HOST_WIDE_INT probable_max_size;
2910
2911 /* If DEST is not a pointer type, call the normal function. */
2912 if (dest_align == 0)
2913 return NULL_RTX;
2914
2915 /* If SRC is not a pointer type, don't do this
2916 operation in-line. */
2917 if (src_align == 0)
2918 return NULL_RTX;
2919
2920 if (currently_expanding_gimple_stmt)
2921 stringop_block_profile (currently_expanding_gimple_stmt,
2922 &expected_align, &expected_size);
2923
2924 if (expected_align < dest_align)
2925 expected_align = dest_align;
2926 dest_mem = get_memory_rtx (dest, len);
2927 set_mem_align (dest_mem, dest_align);
2928 len_rtx = expand_normal (len);
2929 determine_block_size (len, len_rtx, &min_size, &max_size,
2930 &probable_max_size);
2931 src_str = c_getstr (src);
2932
2933 /* If SRC is a string constant and block move would be done
2934 by pieces, we can avoid loading the string from memory
2935 and only store the computed constants. */
2936 if (src_str
2937 && CONST_INT_P (len_rtx)
2938 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
2939 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
2940 CONST_CAST (char *, src_str),
2941 dest_align, false))
2942 {
2943 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
2944 builtin_memcpy_read_str,
2945 CONST_CAST (char *, src_str),
2946 dest_align, false, 0);
2947 dest_mem = force_operand (XEXP (dest_mem, 0), target);
2948 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2949 return dest_mem;
2950 }
2951
2952 src_mem = get_memory_rtx (src, len);
2953 set_mem_align (src_mem, src_align);
2954
2955 /* Copy word part most expediently. */
2956 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
2957 CALL_EXPR_TAILCALL (exp)
2958 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
2959 expected_align, expected_size,
2960 min_size, max_size, probable_max_size);
2961
2962 if (dest_addr == 0)
2963 {
2964 dest_addr = force_operand (XEXP (dest_mem, 0), target);
2965 dest_addr = convert_memory_address (ptr_mode, dest_addr);
2966 }
2967
2968 return dest_addr;
2969 }
2970
2971 /* Expand a call EXP to the memcpy builtin.
2972 Return NULL_RTX if we failed; the caller should emit a normal call;
2973 otherwise try to get the result in TARGET, if convenient (and in
2974 mode MODE if that's convenient). */
2975
2976 static rtx
2977 expand_builtin_memcpy (tree exp, rtx target)
2978 {
2979 if (!validate_arglist (exp,
2980 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2981 return NULL_RTX;
2982 else
2983 {
2984 tree dest = CALL_EXPR_ARG (exp, 0);
2985 tree src = CALL_EXPR_ARG (exp, 1);
2986 tree len = CALL_EXPR_ARG (exp, 2);
2987 return expand_builtin_memcpy_args (dest, src, len, target, exp);
2988 }
2989 }
2990
2991 /* Expand an instrumented call EXP to the memcpy builtin.
2992 Return NULL_RTX if we failed; the caller should emit a normal call;
2993 otherwise try to get the result in TARGET, if convenient (and in
2994 mode MODE if that's convenient). */
2995
2996 static rtx
2997 expand_builtin_memcpy_with_bounds (tree exp, rtx target)
2998 {
2999 if (!validate_arglist (exp,
3000 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3001 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3002 INTEGER_TYPE, VOID_TYPE))
3003 return NULL_RTX;
3004 else
3005 {
3006 tree dest = CALL_EXPR_ARG (exp, 0);
3007 tree src = CALL_EXPR_ARG (exp, 2);
3008 tree len = CALL_EXPR_ARG (exp, 4);
3009 rtx res = expand_builtin_memcpy_args (dest, src, len, target, exp);
3010
3011 /* Return src bounds with the result. */
3012 if (res)
3013 {
3014 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3015 expand_normal (CALL_EXPR_ARG (exp, 1)));
3016 res = chkp_join_splitted_slot (res, bnd);
3017 }
3018 return res;
3019 }
3020 }
3021
3022 /* Expand a call EXP to the mempcpy builtin.
3023 Return NULL_RTX if we failed; the caller should emit a normal call,
3024 otherwise try to get the result in TARGET, if convenient (and in
3025 mode MODE if that's convenient). If ENDP is 0 return the
3026 destination pointer, if ENDP is 1 return the end pointer ala
3027 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3028 stpcpy. */
3029
3030 static rtx
3031 expand_builtin_mempcpy (tree exp, rtx target, machine_mode mode)
3032 {
3033 if (!validate_arglist (exp,
3034 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3035 return NULL_RTX;
3036 else
3037 {
3038 tree dest = CALL_EXPR_ARG (exp, 0);
3039 tree src = CALL_EXPR_ARG (exp, 1);
3040 tree len = CALL_EXPR_ARG (exp, 2);
3041 return expand_builtin_mempcpy_args (dest, src, len,
3042 target, mode, /*endp=*/ 1,
3043 exp);
3044 }
3045 }
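/* The ENDP convention used here and below, illustrated:

     char buf[16];
     memcpy  (buf, "abc", 4);   // ENDP == 0, returns buf
     mempcpy (buf, "abc", 4);   // ENDP == 1, returns buf + 4
     stpcpy  (buf, "abc");      // ENDP == 2, returns buf + 3 (the NUL)  */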
3046
3047 /* Expand an instrumented call EXP to the mempcpy builtin.
3048 Return NULL_RTX if we failed; the caller should emit a normal call;
3049 otherwise try to get the result in TARGET, if convenient (and in
3050 mode MODE if that's convenient). */
3051
3052 static rtx
3053 expand_builtin_mempcpy_with_bounds (tree exp, rtx target, machine_mode mode)
3054 {
3055 if (!validate_arglist (exp,
3056 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3057 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3058 INTEGER_TYPE, VOID_TYPE))
3059 return NULL_RTX;
3060 else
3061 {
3062 tree dest = CALL_EXPR_ARG (exp, 0);
3063 tree src = CALL_EXPR_ARG (exp, 2);
3064 tree len = CALL_EXPR_ARG (exp, 4);
3065 rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
3066 mode, 1, exp);
3067
3068 /* Return src bounds with the result. */
3069 if (res)
3070 {
3071 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3072 expand_normal (CALL_EXPR_ARG (exp, 1)));
3073 res = chkp_join_splitted_slot (res, bnd);
3074 }
3075 return res;
3076 }
3077 }
3078
3079 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3080 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3081 so that this can also be called without constructing an actual CALL_EXPR.
3082 The other arguments and return value are the same as for
3083 expand_builtin_mempcpy. */
3084
3085 static rtx
3086 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3087 rtx target, machine_mode mode, int endp,
3088 tree orig_exp)
3089 {
3090 tree fndecl = get_callee_fndecl (orig_exp);
3091
3092 /* If return value is ignored, transform mempcpy into memcpy. */
3093 if (target == const0_rtx
3094 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
3095 && builtin_decl_implicit_p (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP))
3096 {
3097 tree fn = builtin_decl_implicit (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP);
3098 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3099 dest, src, len);
3100 return expand_expr (result, target, mode, EXPAND_NORMAL);
3101 }
3102 else if (target == const0_rtx
3103 && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3104 {
3105 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3106 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3107 dest, src, len);
3108 return expand_expr (result, target, mode, EXPAND_NORMAL);
3109 }
3110 else
3111 {
3112 const char *src_str;
3113 unsigned int src_align = get_pointer_alignment (src);
3114 unsigned int dest_align = get_pointer_alignment (dest);
3115 rtx dest_mem, src_mem, len_rtx;
3116
3117 /* If either SRC or DEST is not a pointer type, don't do this
3118 operation in-line. */
3119 if (dest_align == 0 || src_align == 0)
3120 return NULL_RTX;
3121
3122 /* If LEN is not constant, call the normal function. */
3123 if (! tree_fits_uhwi_p (len))
3124 return NULL_RTX;
3125
3126 len_rtx = expand_normal (len);
3127 src_str = c_getstr (src);
3128
3129 /* If SRC is a string constant and block move would be done
3130 by pieces, we can avoid loading the string from memory
3131 and only store the computed constants. */
3132 if (src_str
3133 && CONST_INT_P (len_rtx)
3134 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3135 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3136 CONST_CAST (char *, src_str),
3137 dest_align, false))
3138 {
3139 dest_mem = get_memory_rtx (dest, len);
3140 set_mem_align (dest_mem, dest_align);
3141 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3142 builtin_memcpy_read_str,
3143 CONST_CAST (char *, src_str),
3144 dest_align, false, endp);
3145 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3146 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3147 return dest_mem;
3148 }
3149
3150 if (CONST_INT_P (len_rtx)
3151 && can_move_by_pieces (INTVAL (len_rtx),
3152 MIN (dest_align, src_align)))
3153 {
3154 dest_mem = get_memory_rtx (dest, len);
3155 set_mem_align (dest_mem, dest_align);
3156 src_mem = get_memory_rtx (src, len);
3157 set_mem_align (src_mem, src_align);
3158 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3159 MIN (dest_align, src_align), endp);
3160 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3161 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3162 return dest_mem;
3163 }
3164
3165 return NULL_RTX;
3166 }
3167 }
3168
3169 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3170 we failed; the caller should emit a normal call; otherwise try to
3171 get the result in TARGET, if convenient. If ENDP is 0 return the
3172 destination pointer, if ENDP is 1 return the end pointer ala
3173 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3174 stpcpy. */
3175
3176 static rtx
3177 expand_movstr (tree dest, tree src, rtx target, int endp)
3178 {
3179 struct expand_operand ops[3];
3180 rtx dest_mem;
3181 rtx src_mem;
3182
3183 if (!targetm.have_movstr ())
3184 return NULL_RTX;
3185
3186 dest_mem = get_memory_rtx (dest, NULL);
3187 src_mem = get_memory_rtx (src, NULL);
3188 if (!endp)
3189 {
3190 target = force_reg (Pmode, XEXP (dest_mem, 0));
3191 dest_mem = replace_equiv_address (dest_mem, target);
3192 }
3193
3194 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3195 create_fixed_operand (&ops[1], dest_mem);
3196 create_fixed_operand (&ops[2], src_mem);
3197 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3198 return NULL_RTX;
3199
3200 if (endp && target != const0_rtx)
3201 {
3202 target = ops[0].value;
3203 /* movstr is supposed to set end to the address of the NUL
3204 terminator. If the caller requested a mempcpy-like return value,
3205 adjust it. */
3206 if (endp == 1)
3207 {
3208 rtx tem = plus_constant (GET_MODE (target),
3209 gen_lowpart (GET_MODE (target), target), 1);
3210 emit_move_insn (target, force_operand (tem, NULL_RTX));
3211 }
3212 }
3213 return target;
3214 }
3215
3216 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3217 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3218 try to get the result in TARGET, if convenient (and in mode MODE if that's
3219 convenient). */
3220
3221 static rtx
3222 expand_builtin_strcpy (tree exp, rtx target)
3223 {
3224 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3225 {
3226 tree dest = CALL_EXPR_ARG (exp, 0);
3227 tree src = CALL_EXPR_ARG (exp, 1);
3228 return expand_builtin_strcpy_args (dest, src, target);
3229 }
3230 return NULL_RTX;
3231 }
3232
3233 /* Helper function to do the actual work for expand_builtin_strcpy. The
3234 arguments to the builtin_strcpy call DEST and SRC are broken out
3235 so that this can also be called without constructing an actual CALL_EXPR.
3236 The other arguments and return value are the same as for
3237 expand_builtin_strcpy. */
3238
3239 static rtx
3240 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3241 {
3242 return expand_movstr (dest, src, target, /*endp=*/0);
3243 }
3244
3245 /* Expand a call EXP to the stpcpy builtin.
3246 Return NULL_RTX if we failed; the caller should emit a normal call;
3247 otherwise try to get the result in TARGET, if convenient (and in
3248 mode MODE if that's convenient). */
3249
3250 static rtx
3251 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3252 {
3253 tree dst, src;
3254 location_t loc = EXPR_LOCATION (exp);
3255
3256 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3257 return NULL_RTX;
3258
3259 dst = CALL_EXPR_ARG (exp, 0);
3260 src = CALL_EXPR_ARG (exp, 1);
3261
3262 /* If return value is ignored, transform stpcpy into strcpy. */
3263 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3264 {
3265 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3266 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3267 return expand_expr (result, target, mode, EXPAND_NORMAL);
3268 }
3269 else
3270 {
3271 tree len, lenp1;
3272 rtx ret;
3273
3274 /* Ensure we get an actual string whose length can be evaluated at
3275 compile-time, not an expression containing a string. This is
3276 because the latter will potentially produce pessimized code
3277 when used to produce the return value. */
3278 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3279 return expand_movstr (dst, src, target, /*endp=*/2);
3280
3281 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3282 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3283 target, mode, /*endp=*/2,
3284 exp);
3285
3286 if (ret)
3287 return ret;
3288
3289 if (TREE_CODE (len) == INTEGER_CST)
3290 {
3291 rtx len_rtx = expand_normal (len);
3292
3293 if (CONST_INT_P (len_rtx))
3294 {
3295 ret = expand_builtin_strcpy_args (dst, src, target);
3296
3297 if (ret)
3298 {
3299 if (! target)
3300 {
3301 if (mode != VOIDmode)
3302 target = gen_reg_rtx (mode);
3303 else
3304 target = gen_reg_rtx (GET_MODE (ret));
3305 }
3306 if (GET_MODE (target) != GET_MODE (ret))
3307 ret = gen_lowpart (GET_MODE (target), ret);
3308
3309 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3310 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3311 gcc_assert (ret);
3312
3313 return target;
3314 }
3315 }
3316 }
3317
3318 return expand_movstr (dst, src, target, /*endp=*/2);
3319 }
3320 }
3321
3322 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3323 bytes from constant string DATA + OFFSET and return it as target
3324 constant. */
3325
3326 rtx
3327 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3328 machine_mode mode)
3329 {
3330 const char *str = (const char *) data;
3331
3332 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3333 return const0_rtx;
3334
3335 return c_readstr (str + offset, mode);
3336 }
3337
3338 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3339 NULL_RTX if we failed; the caller should emit a normal call. */
3340
3341 static rtx
3342 expand_builtin_strncpy (tree exp, rtx target)
3343 {
3344 location_t loc = EXPR_LOCATION (exp);
3345
3346 if (validate_arglist (exp,
3347 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3348 {
3349 tree dest = CALL_EXPR_ARG (exp, 0);
3350 tree src = CALL_EXPR_ARG (exp, 1);
3351 tree len = CALL_EXPR_ARG (exp, 2);
3352 tree slen = c_strlen (src, 1);
3353
3354 /* We must be passed a constant len and src parameter. */
3355 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3356 return NULL_RTX;
3357
3358 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3359
3360 /* We're required to pad with trailing zeros if the requested
3361 len is greater than strlen(s2)+1. In that case try to
3362 use store_by_pieces; if it fails, punt. */
3363 if (tree_int_cst_lt (slen, len))
3364 {
3365 unsigned int dest_align = get_pointer_alignment (dest);
3366 const char *p = c_getstr (src);
3367 rtx dest_mem;
3368
3369 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3370 || !can_store_by_pieces (tree_to_uhwi (len),
3371 builtin_strncpy_read_str,
3372 CONST_CAST (char *, p),
3373 dest_align, false))
3374 return NULL_RTX;
3375
3376 dest_mem = get_memory_rtx (dest, len);
3377 store_by_pieces (dest_mem, tree_to_uhwi (len),
3378 builtin_strncpy_read_str,
3379 CONST_CAST (char *, p), dest_align, false, 0);
3380 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3381 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3382 return dest_mem;
3383 }
3384 }
3385 return NULL_RTX;
3386 }
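
/* Illustrative sketch (not compiled code): strncpy must pad the destination
   with nul bytes whenever the requested length exceeds strlen (SRC) + 1,
   which is why the path above materializes the whole LEN-byte block, e.g.

     char buf[8];
     strncpy (buf, "ab", 8);

   stores 'a', 'b' and then six nul bytes; with a constant source string and
   length this is emitted via store_by_pieces instead of a library call.  */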
3387
3388 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3389 bytes from constant string DATA + OFFSET and return it as target
3390 constant. */
3391
3392 rtx
3393 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3394 machine_mode mode)
3395 {
3396 const char *c = (const char *) data;
3397 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3398
3399 memset (p, *c, GET_MODE_SIZE (mode));
3400
3401 return c_readstr (p, mode);
3402 }
3403
3404 /* Callback routine for store_by_pieces. Return the RTL of a register
3405 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3406 char value given in the RTL register data. For example, if mode is
3407 4 bytes wide, return the RTL for 0x01010101*data. */
3408
3409 static rtx
3410 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3411 machine_mode mode)
3412 {
3413 rtx target, coeff;
3414 size_t size;
3415 char *p;
3416
3417 size = GET_MODE_SIZE (mode);
3418 if (size == 1)
3419 return (rtx) data;
3420
3421 p = XALLOCAVEC (char, size);
3422 memset (p, 1, size);
3423 coeff = c_readstr (p, mode);
3424
3425 target = convert_to_mode (mode, (rtx) data, 1);
3426 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3427 return force_reg (mode, target);
3428 }
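
/* Illustrative sketch (not compiled code): the multiplication above is the
   usual byte-replication trick.  For a 4-byte mode a C equivalent (using a
   hypothetical helper name) would be

     uint32_t
     replicate_byte (uint8_t b)
     {
       return (uint32_t) b * 0x01010101u;
     }

   so that, e.g., replicate_byte (0xab) == 0xabababab.  */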
3429
3430 /* Expand expression EXP, which is a call to the memset builtin. Return
3431 NULL_RTX if we failed and the caller should emit a normal call, otherwise
3432 try to get the result in TARGET, if convenient (and in mode MODE if that's
3433 convenient). */
3434
3435 static rtx
3436 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
3437 {
3438 if (!validate_arglist (exp,
3439 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3440 return NULL_RTX;
3441 else
3442 {
3443 tree dest = CALL_EXPR_ARG (exp, 0);
3444 tree val = CALL_EXPR_ARG (exp, 1);
3445 tree len = CALL_EXPR_ARG (exp, 2);
3446 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3447 }
3448 }
3449
3450 /* Expand expression EXP, which is an instrumented call to the memset builtin.
3451 Return NULL_RTX if we failed and the caller should emit a normal call, otherwise
3452 try to get the result in TARGET, if convenient (and in mode MODE if that's
3453 convenient). */
3454
3455 static rtx
3456 expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
3457 {
3458 if (!validate_arglist (exp,
3459 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3460 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3461 return NULL_RTX;
3462 else
3463 {
3464 tree dest = CALL_EXPR_ARG (exp, 0);
3465 tree val = CALL_EXPR_ARG (exp, 2);
3466 tree len = CALL_EXPR_ARG (exp, 3);
3467 rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
3468
3469 /* Return src bounds with the result. */
3470 if (res)
3471 {
3472 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3473 expand_normal (CALL_EXPR_ARG (exp, 1)));
3474 res = chkp_join_splitted_slot (res, bnd);
3475 }
3476 return res;
3477 }
3478 }
3479
3480 /* Helper function to do the actual work for expand_builtin_memset. The
3481 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3482 so that this can also be called without constructing an actual CALL_EXPR.
3483 The other arguments and return value are the same as for
3484 expand_builtin_memset. */
3485
3486 static rtx
3487 expand_builtin_memset_args (tree dest, tree val, tree len,
3488 rtx target, machine_mode mode, tree orig_exp)
3489 {
3490 tree fndecl, fn;
3491 enum built_in_function fcode;
3492 machine_mode val_mode;
3493 char c;
3494 unsigned int dest_align;
3495 rtx dest_mem, dest_addr, len_rtx;
3496 HOST_WIDE_INT expected_size = -1;
3497 unsigned int expected_align = 0;
3498 unsigned HOST_WIDE_INT min_size;
3499 unsigned HOST_WIDE_INT max_size;
3500 unsigned HOST_WIDE_INT probable_max_size;
3501
3502 dest_align = get_pointer_alignment (dest);
3503
3504 /* If DEST is not a pointer type, don't do this operation in-line. */
3505 if (dest_align == 0)
3506 return NULL_RTX;
3507
3508 if (currently_expanding_gimple_stmt)
3509 stringop_block_profile (currently_expanding_gimple_stmt,
3510 &expected_align, &expected_size);
3511
3512 if (expected_align < dest_align)
3513 expected_align = dest_align;
3514
3515 /* If the LEN parameter is zero, return DEST. */
3516 if (integer_zerop (len))
3517 {
3518 /* Evaluate and ignore VAL in case it has side-effects. */
3519 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3520 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3521 }
3522
3523 /* Stabilize the arguments in case we fail. */
3524 dest = builtin_save_expr (dest);
3525 val = builtin_save_expr (val);
3526 len = builtin_save_expr (len);
3527
3528 len_rtx = expand_normal (len);
3529 determine_block_size (len, len_rtx, &min_size, &max_size,
3530 &probable_max_size);
3531 dest_mem = get_memory_rtx (dest, len);
3532 val_mode = TYPE_MODE (unsigned_char_type_node);
3533
3534 if (TREE_CODE (val) != INTEGER_CST)
3535 {
3536 rtx val_rtx;
3537
3538 val_rtx = expand_normal (val);
3539 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3540
3541 /* Assume that we can memset by pieces if we can store
3542 the coefficients by pieces (in the required modes).
3543 We can't pass builtin_memset_gen_str as that emits RTL. */
3544 c = 1;
3545 if (tree_fits_uhwi_p (len)
3546 && can_store_by_pieces (tree_to_uhwi (len),
3547 builtin_memset_read_str, &c, dest_align,
3548 true))
3549 {
3550 val_rtx = force_reg (val_mode, val_rtx);
3551 store_by_pieces (dest_mem, tree_to_uhwi (len),
3552 builtin_memset_gen_str, val_rtx, dest_align,
3553 true, 0);
3554 }
3555 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3556 dest_align, expected_align,
3557 expected_size, min_size, max_size,
3558 probable_max_size))
3559 goto do_libcall;
3560
3561 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3562 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3563 return dest_mem;
3564 }
3565
3566 if (target_char_cast (val, &c))
3567 goto do_libcall;
3568
3569 if (c)
3570 {
3571 if (tree_fits_uhwi_p (len)
3572 && can_store_by_pieces (tree_to_uhwi (len),
3573 builtin_memset_read_str, &c, dest_align,
3574 true))
3575 store_by_pieces (dest_mem, tree_to_uhwi (len),
3576 builtin_memset_read_str, &c, dest_align, true, 0);
3577 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3578 gen_int_mode (c, val_mode),
3579 dest_align, expected_align,
3580 expected_size, min_size, max_size,
3581 probable_max_size))
3582 goto do_libcall;
3583
3584 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3585 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3586 return dest_mem;
3587 }
3588
3589 set_mem_align (dest_mem, dest_align);
3590 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3591 CALL_EXPR_TAILCALL (orig_exp)
3592 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3593 expected_align, expected_size,
3594 min_size, max_size,
3595 probable_max_size);
3596
3597 if (dest_addr == 0)
3598 {
3599 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3600 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3601 }
3602
3603 return dest_addr;
3604
3605 do_libcall:
3606 fndecl = get_callee_fndecl (orig_exp);
3607 fcode = DECL_FUNCTION_CODE (fndecl);
3608 if (fcode == BUILT_IN_MEMSET
3609 || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
3610 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3611 dest, val, len);
3612 else if (fcode == BUILT_IN_BZERO)
3613 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3614 dest, len);
3615 else
3616 gcc_unreachable ();
3617 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3618 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3619 return expand_call (fn, target, target == const0_rtx);
3620 }
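
/* Illustrative sketch (not compiled code): for a call such as

     memset (p, c, 32);

   the code above prefers, in order, store_by_pieces (when the length is a
   known constant and the target can do the stores cheaply), then a target
   setmem pattern (or clear_storage when the byte is known to be zero), and
   finally a library call to memset, or to bzero if that was the original
   entry point.  */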
3621
3622 /* Expand expression EXP, which is a call to the bzero builtin. Return
3623 NULL_RTX if we failed and the caller should emit a normal call. */
3624
3625 static rtx
3626 expand_builtin_bzero (tree exp)
3627 {
3628 tree dest, size;
3629 location_t loc = EXPR_LOCATION (exp);
3630
3631 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3632 return NULL_RTX;
3633
3634 dest = CALL_EXPR_ARG (exp, 0);
3635 size = CALL_EXPR_ARG (exp, 1);
3636
3637 /* New argument list transforming bzero(ptr x, int y) to
3638 memset(ptr x, int 0, size_t y). This is done this way
3639 so that if it isn't expanded inline, we fall back to
3640 calling bzero instead of memset. */
3641
3642 return expand_builtin_memset_args (dest, integer_zero_node,
3643 fold_convert_loc (loc,
3644 size_type_node, size),
3645 const0_rtx, VOIDmode, exp);
3646 }
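
/* Illustrative sketch (not compiled code): the call is handled as if the
   user had written

     bzero (p, n);   =>   memset (p, 0, (size_t) n);

   but ORIG_EXP is still the bzero call, so when inline expansion fails the
   emitted library call is bzero rather than memset.  */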
3647
3648 /* Try to expand cmpstr operation ICODE with the given operands.
3649 Return the result rtx on success, otherwise return null. */
3650
3651 static rtx
3652 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
3653 HOST_WIDE_INT align)
3654 {
3655 machine_mode insn_mode = insn_data[icode].operand[0].mode;
3656
3657 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
3658 target = NULL_RTX;
3659
3660 struct expand_operand ops[4];
3661 create_output_operand (&ops[0], target, insn_mode);
3662 create_fixed_operand (&ops[1], arg1_rtx);
3663 create_fixed_operand (&ops[2], arg2_rtx);
3664 create_integer_operand (&ops[3], align);
3665 if (maybe_expand_insn (icode, 4, ops))
3666 return ops[0].value;
3667 return NULL_RTX;
3668 }
3669
3670 /* Expand expression EXP, which is a call to the memcmp built-in function.
3671 Return NULL_RTX if we failed and the caller should emit a normal call,
3672 otherwise try to get the result in TARGET, if convenient.
3673 RESULT_EQ is true if we can relax the returned value to be either zero
3674 or nonzero, without caring about the sign. */
3675
3676 static rtx
3677 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
3678 {
3679 if (!validate_arglist (exp,
3680 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3681 return NULL_RTX;
3682
3683 tree arg1 = CALL_EXPR_ARG (exp, 0);
3684 tree arg2 = CALL_EXPR_ARG (exp, 1);
3685 tree len = CALL_EXPR_ARG (exp, 2);
3686 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
3687 location_t loc = EXPR_LOCATION (exp);
3688
3689 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3690 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3691
3692 /* If we don't have POINTER_TYPE, call the function. */
3693 if (arg1_align == 0 || arg2_align == 0)
3694 return NULL_RTX;
3695
3696 rtx arg1_rtx = get_memory_rtx (arg1, len);
3697 rtx arg2_rtx = get_memory_rtx (arg2, len);
3698 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3699
3700 /* Set MEM_SIZE as appropriate. */
3701 if (CONST_INT_P (len_rtx))
3702 {
3703 set_mem_size (arg1_rtx, INTVAL (len_rtx));
3704 set_mem_size (arg2_rtx, INTVAL (len_rtx));
3705 }
3706
3707 by_pieces_constfn constfn = NULL;
3708
3709 const char *src_str = c_getstr (arg1);
3710 if (src_str == NULL)
3711 src_str = c_getstr (arg2);
3712 else
3713 std::swap (arg1_rtx, arg2_rtx);
3714
3715 /* If SRC is a string constant and the comparison would be done
3716 by pieces, we can avoid loading the string from memory
3717 and use only the computed constants. */
3718 if (src_str
3719 && CONST_INT_P (len_rtx)
3720 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
3721 constfn = builtin_memcpy_read_str;
3722
3723 rtx result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
3724 TREE_TYPE (len), target,
3725 result_eq, constfn,
3726 CONST_CAST (char *, src_str));
3727
3728 if (result)
3729 {
3730 /* Return the value in the proper mode for this function. */
3731 if (GET_MODE (result) == mode)
3732 return result;
3733
3734 if (target != 0)
3735 {
3736 convert_move (target, result, 0);
3737 return target;
3738 }
3739
3740 return convert_to_mode (mode, result, 0);
3741 }
3742
3743 return NULL_RTX;
3744 }
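
/* Illustrative sketch (not compiled code): a comparison against a string
   literal with a constant length, e.g.

     memcmp (buf, "GIF89a", 6)

   can be compared by pieces: the literal operand is synthesized as immediate
   constants through builtin_memcpy_read_str instead of being loaded from
   memory.  When RESULT_EQ is set (the caller only tests for equality) the
   target is free to produce any nonzero value for a mismatch.  */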
3745
3746 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
3747 if we failed and the caller should emit a normal call, otherwise try to get
3748 the result in TARGET, if convenient. */
3749
3750 static rtx
3751 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
3752 {
3753 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3754 return NULL_RTX;
3755
3756 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
3757 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
3758 if (cmpstr_icode != CODE_FOR_nothing || cmpstrn_icode != CODE_FOR_nothing)
3759 {
3760 rtx arg1_rtx, arg2_rtx;
3761 tree fndecl, fn;
3762 tree arg1 = CALL_EXPR_ARG (exp, 0);
3763 tree arg2 = CALL_EXPR_ARG (exp, 1);
3764 rtx result = NULL_RTX;
3765
3766 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3767 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3768
3769 /* If we don't have POINTER_TYPE, call the function. */
3770 if (arg1_align == 0 || arg2_align == 0)
3771 return NULL_RTX;
3772
3773 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
3774 arg1 = builtin_save_expr (arg1);
3775 arg2 = builtin_save_expr (arg2);
3776
3777 arg1_rtx = get_memory_rtx (arg1, NULL);
3778 arg2_rtx = get_memory_rtx (arg2, NULL);
3779
3780 /* Try to call cmpstrsi. */
3781 if (cmpstr_icode != CODE_FOR_nothing)
3782 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
3783 MIN (arg1_align, arg2_align));
3784
3785 /* Try to determine at least one length and call cmpstrnsi. */
3786 if (!result && cmpstrn_icode != CODE_FOR_nothing)
3787 {
3788 tree len;
3789 rtx arg3_rtx;
3790
3791 tree len1 = c_strlen (arg1, 1);
3792 tree len2 = c_strlen (arg2, 1);
3793
3794 if (len1)
3795 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3796 if (len2)
3797 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3798
3799 /* If we don't have a constant length for the first, use the length
3800 of the second, if we know it. We don't require a constant for
3801 this case; some cost analysis could be done if both are available
3802 but neither is constant. For now, assume they're equally cheap,
3803 unless one has side effects. If both strings have constant lengths,
3804 use the smaller. */
3805
3806 if (!len1)
3807 len = len2;
3808 else if (!len2)
3809 len = len1;
3810 else if (TREE_SIDE_EFFECTS (len1))
3811 len = len2;
3812 else if (TREE_SIDE_EFFECTS (len2))
3813 len = len1;
3814 else if (TREE_CODE (len1) != INTEGER_CST)
3815 len = len2;
3816 else if (TREE_CODE (len2) != INTEGER_CST)
3817 len = len1;
3818 else if (tree_int_cst_lt (len1, len2))
3819 len = len1;
3820 else
3821 len = len2;
3822
3823 /* If both arguments have side effects, we cannot optimize. */
3824 if (len && !TREE_SIDE_EFFECTS (len))
3825 {
3826 arg3_rtx = expand_normal (len);
3827 result = expand_cmpstrn_or_cmpmem
3828 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
3829 arg3_rtx, MIN (arg1_align, arg2_align));
3830 }
3831 }
3832
3833 if (result)
3834 {
3835 /* Return the value in the proper mode for this function. */
3836 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
3837 if (GET_MODE (result) == mode)
3838 return result;
3839 if (target == 0)
3840 return convert_to_mode (mode, result, 0);
3841 convert_move (target, result, 0);
3842 return target;
3843 }
3844
3845 /* Expand the library call ourselves using a stabilized argument
3846 list to avoid re-evaluating the function's arguments twice. */
3847 fndecl = get_callee_fndecl (exp);
3848 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
3849 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3850 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
3851 return expand_call (fn, target, target == const0_rtx);
3852 }
3853 return NULL_RTX;
3854 }
3855
3856 /* Expand expression EXP, which is a call to the strncmp builtin. Return
3857 NULL_RTX if we failed and the caller should emit a normal call, otherwise
3858 try to get the result in TARGET, if convenient. */
3859
3860 static rtx
3861 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3862 ATTRIBUTE_UNUSED machine_mode mode)
3863 {
3864 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3865
3866 if (!validate_arglist (exp,
3867 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3868 return NULL_RTX;
3869
3870 /* If c_strlen can determine an expression for one of the string
3871 lengths, and it doesn't have side effects, then emit cmpstrnsi
3872 using length MIN(strlen(string)+1, arg3). */
3873 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
3874 if (cmpstrn_icode != CODE_FOR_nothing)
3875 {
3876 tree len, len1, len2;
3877 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3878 rtx result;
3879 tree fndecl, fn;
3880 tree arg1 = CALL_EXPR_ARG (exp, 0);
3881 tree arg2 = CALL_EXPR_ARG (exp, 1);
3882 tree arg3 = CALL_EXPR_ARG (exp, 2);
3883
3884 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3885 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3886
3887 len1 = c_strlen (arg1, 1);
3888 len2 = c_strlen (arg2, 1);
3889
3890 if (len1)
3891 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
3892 if (len2)
3893 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
3894
3895 /* If we don't have a constant length for the first, use the length
3896 of the second, if we know it. We don't require a constant for
3897 this case; some cost analysis could be done if both are available
3898 but neither is constant. For now, assume they're equally cheap,
3899 unless one has side effects. If both strings have constant lengths,
3900 use the smaller. */
3901
3902 if (!len1)
3903 len = len2;
3904 else if (!len2)
3905 len = len1;
3906 else if (TREE_SIDE_EFFECTS (len1))
3907 len = len2;
3908 else if (TREE_SIDE_EFFECTS (len2))
3909 len = len1;
3910 else if (TREE_CODE (len1) != INTEGER_CST)
3911 len = len2;
3912 else if (TREE_CODE (len2) != INTEGER_CST)
3913 len = len1;
3914 else if (tree_int_cst_lt (len1, len2))
3915 len = len1;
3916 else
3917 len = len2;
3918
3919 /* If both arguments have side effects, we cannot optimize. */
3920 if (!len || TREE_SIDE_EFFECTS (len))
3921 return NULL_RTX;
3922
3923 /* The actual new length parameter is MIN(len,arg3). */
3924 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
3925 fold_convert_loc (loc, TREE_TYPE (len), arg3));
3926
3927 /* If we don't have POINTER_TYPE, call the function. */
3928 if (arg1_align == 0 || arg2_align == 0)
3929 return NULL_RTX;
3930
3931 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
3932 arg1 = builtin_save_expr (arg1);
3933 arg2 = builtin_save_expr (arg2);
3934 len = builtin_save_expr (len);
3935
3936 arg1_rtx = get_memory_rtx (arg1, len);
3937 arg2_rtx = get_memory_rtx (arg2, len);
3938 arg3_rtx = expand_normal (len);
3939 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
3940 arg2_rtx, TREE_TYPE (len), arg3_rtx,
3941 MIN (arg1_align, arg2_align));
3942 if (result)
3943 {
3944 /* Return the value in the proper mode for this function. */
3945 mode = TYPE_MODE (TREE_TYPE (exp));
3946 if (GET_MODE (result) == mode)
3947 return result;
3948 if (target == 0)
3949 return convert_to_mode (mode, result, 0);
3950 convert_move (target, result, 0);
3951 return target;
3952 }
3953
3954 /* Expand the library call ourselves using a stabilized argument
3955 list to avoid re-evaluating the function's arguments twice. */
3956 fndecl = get_callee_fndecl (exp);
3957 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
3958 arg1, arg2, len);
3959 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3960 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
3961 return expand_call (fn, target, target == const0_rtx);
3962 }
3963 return NULL_RTX;
3964 }
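
/* Illustrative sketch (not compiled code): when one argument is a string
   literal the comparison length can be capped, e.g.

     strncmp (s, "abc", 100)

   never needs to inspect more than strlen ("abc") + 1 == 4 bytes, so the
   cmpstrn pattern is emitted with length MIN (4, 100) == 4.  */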
3965
3966 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
3967 if that's convenient. */
3968
3969 rtx
3970 expand_builtin_saveregs (void)
3971 {
3972 rtx val;
3973 rtx_insn *seq;
3974
3975 /* Don't do __builtin_saveregs more than once in a function.
3976 Save the result of the first call and reuse it. */
3977 if (saveregs_value != 0)
3978 return saveregs_value;
3979
3980 /* When this function is called, it means that registers must be
3981 saved on entry to this function. So we migrate the call to the
3982 first insn of this function. */
3983
3984 start_sequence ();
3985
3986 /* Do whatever the machine needs done in this case. */
3987 val = targetm.calls.expand_builtin_saveregs ();
3988
3989 seq = get_insns ();
3990 end_sequence ();
3991
3992 saveregs_value = val;
3993
3994 /* Put the insns after the NOTE that starts the function. If this
3995 is inside a start_sequence, make the outer-level insn chain current, so
3996 the code is placed at the start of the function. */
3997 push_topmost_sequence ();
3998 emit_insn_after (seq, entry_of_function ());
3999 pop_topmost_sequence ();
4000
4001 return val;
4002 }
4003
4004 /* Expand a call to __builtin_next_arg. */
4005
4006 static rtx
4007 expand_builtin_next_arg (void)
4008 {
4009 /* Checking arguments is already done in fold_builtin_next_arg
4010 that must be called before this function. */
4011 return expand_binop (ptr_mode, add_optab,
4012 crtl->args.internal_arg_pointer,
4013 crtl->args.arg_offset_rtx,
4014 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4015 }
4016
4017 /* Make it easier for the backends by protecting the valist argument
4018 from multiple evaluations. */
4019
4020 static tree
4021 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4022 {
4023 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4024
4025 /* The current way of determining the type of valist is completely
4026 bogus. We should have the information on the va builtin instead. */
4027 if (!vatype)
4028 vatype = targetm.fn_abi_va_list (cfun->decl);
4029
4030 if (TREE_CODE (vatype) == ARRAY_TYPE)
4031 {
4032 if (TREE_SIDE_EFFECTS (valist))
4033 valist = save_expr (valist);
4034
4035 /* For this case, the backends will be expecting a pointer to
4036 vatype, but it's possible we've actually been given an array
4037 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4038 So fix it. */
4039 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4040 {
4041 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4042 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4043 }
4044 }
4045 else
4046 {
4047 tree pt = build_pointer_type (vatype);
4048
4049 if (! needs_lvalue)
4050 {
4051 if (! TREE_SIDE_EFFECTS (valist))
4052 return valist;
4053
4054 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4055 TREE_SIDE_EFFECTS (valist) = 1;
4056 }
4057
4058 if (TREE_SIDE_EFFECTS (valist))
4059 valist = save_expr (valist);
4060 valist = fold_build2_loc (loc, MEM_REF,
4061 vatype, valist, build_int_cst (pt, 0));
4062 }
4063
4064 return valist;
4065 }
4066
4067 /* The "standard" definition of va_list is void*. */
4068
4069 tree
4070 std_build_builtin_va_list (void)
4071 {
4072 return ptr_type_node;
4073 }
4074
4075 /* The "standard" abi va_list is va_list_type_node. */
4076
4077 tree
4078 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4079 {
4080 return va_list_type_node;
4081 }
4082
4083 /* The "standard" type of va_list is va_list_type_node. */
4084
4085 tree
4086 std_canonical_va_list_type (tree type)
4087 {
4088 tree wtype, htype;
4089
4090 wtype = va_list_type_node;
4091 htype = type;
4092 /* Treat structure va_list types. */
4093 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4094 htype = TREE_TYPE (htype);
4095 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4096 {
4097 /* If va_list is an array type, the argument may have decayed
4098 to a pointer type, e.g. by being passed to another function.
4099 In that case, unwrap both types so that we can compare the
4100 underlying records. */
4101 if (TREE_CODE (htype) == ARRAY_TYPE
4102 || POINTER_TYPE_P (htype))
4103 {
4104 wtype = TREE_TYPE (wtype);
4105 htype = TREE_TYPE (htype);
4106 }
4107 }
4108 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4109 return va_list_type_node;
4110
4111 return NULL_TREE;
4112 }
4113
4114 /* The "standard" implementation of va_start: just assign `nextarg' to
4115 the variable. */
4116
4117 void
4118 std_expand_builtin_va_start (tree valist, rtx nextarg)
4119 {
4120 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4121 convert_move (va_r, nextarg, 0);
4122
4123 /* We do not have any valid bounds for the pointer, so
4124 just store zero bounds for it. */
4125 if (chkp_function_instrumented_p (current_function_decl))
4126 chkp_expand_bounds_reset_for_mem (valist,
4127 make_tree (TREE_TYPE (valist),
4128 nextarg));
4129 }
4130
4131 /* Expand EXP, a call to __builtin_va_start. */
4132
4133 static rtx
4134 expand_builtin_va_start (tree exp)
4135 {
4136 rtx nextarg;
4137 tree valist;
4138 location_t loc = EXPR_LOCATION (exp);
4139
4140 if (call_expr_nargs (exp) < 2)
4141 {
4142 error_at (loc, "too few arguments to function %<va_start%>");
4143 return const0_rtx;
4144 }
4145
4146 if (fold_builtin_next_arg (exp, true))
4147 return const0_rtx;
4148
4149 nextarg = expand_builtin_next_arg ();
4150 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4151
4152 if (targetm.expand_builtin_va_start)
4153 targetm.expand_builtin_va_start (valist, nextarg);
4154 else
4155 std_expand_builtin_va_start (valist, nextarg);
4156
4157 return const0_rtx;
4158 }
4159
4160 /* Expand EXP, a call to __builtin_va_end. */
4161
4162 static rtx
4163 expand_builtin_va_end (tree exp)
4164 {
4165 tree valist = CALL_EXPR_ARG (exp, 0);
4166
4167 /* Evaluate for side effects, if needed. I hate macros that don't
4168 do that. */
4169 if (TREE_SIDE_EFFECTS (valist))
4170 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4171
4172 return const0_rtx;
4173 }
4174
4175 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4176 builtin rather than just as an assignment in stdarg.h because of the
4177 nastiness of array-type va_list types. */
4178
4179 static rtx
4180 expand_builtin_va_copy (tree exp)
4181 {
4182 tree dst, src, t;
4183 location_t loc = EXPR_LOCATION (exp);
4184
4185 dst = CALL_EXPR_ARG (exp, 0);
4186 src = CALL_EXPR_ARG (exp, 1);
4187
4188 dst = stabilize_va_list_loc (loc, dst, 1);
4189 src = stabilize_va_list_loc (loc, src, 0);
4190
4191 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4192
4193 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4194 {
4195 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4196 TREE_SIDE_EFFECTS (t) = 1;
4197 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4198 }
4199 else
4200 {
4201 rtx dstb, srcb, size;
4202
4203 /* Evaluate to pointers. */
4204 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4205 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4206 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4207 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4208
4209 dstb = convert_memory_address (Pmode, dstb);
4210 srcb = convert_memory_address (Pmode, srcb);
4211
4212 /* "Dereference" to BLKmode memories. */
4213 dstb = gen_rtx_MEM (BLKmode, dstb);
4214 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4215 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4216 srcb = gen_rtx_MEM (BLKmode, srcb);
4217 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4218 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4219
4220 /* Copy. */
4221 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4222 }
4223
4224 return const0_rtx;
4225 }
4226
4227 /* Expand a call to one of the builtin functions __builtin_frame_address or
4228 __builtin_return_address. */
4229
4230 static rtx
4231 expand_builtin_frame_address (tree fndecl, tree exp)
4232 {
4233 /* The argument must be a nonnegative integer constant.
4234 It counts the number of frames to scan up the stack.
4235 The value is either the frame pointer value or the return
4236 address saved in that frame. */
4237 if (call_expr_nargs (exp) == 0)
4238 /* Warning about missing arg was already issued. */
4239 return const0_rtx;
4240 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4241 {
4242 error ("invalid argument to %qD", fndecl);
4243 return const0_rtx;
4244 }
4245 else
4246 {
4247 /* Number of frames to scan up the stack. */
4248 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
4249
4250 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
4251
4252 /* Some ports cannot access arbitrary stack frames. */
4253 if (tem == NULL)
4254 {
4255 warning (0, "unsupported argument to %qD", fndecl);
4256 return const0_rtx;
4257 }
4258
4259 if (count)
4260 {
4261 /* Warn since no effort is made to ensure that any frame
4262 beyond the current one exists or can be safely reached. */
4263 warning (OPT_Wframe_address, "calling %qD with "
4264 "a nonzero argument is unsafe", fndecl);
4265 }
4266
4267 /* For __builtin_frame_address, return what we've got. */
4268 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4269 return tem;
4270
4271 if (!REG_P (tem)
4272 && ! CONSTANT_P (tem))
4273 tem = copy_addr_to_reg (tem);
4274 return tem;
4275 }
4276 }
4277
4278 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4279 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4280 is the same as for allocate_dynamic_stack_space. */
4281
4282 static rtx
4283 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4284 {
4285 rtx op0;
4286 rtx result;
4287 bool valid_arglist;
4288 unsigned int align;
4289 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4290 == BUILT_IN_ALLOCA_WITH_ALIGN);
4291
4292 valid_arglist
4293 = (alloca_with_align
4294 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4295 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4296
4297 if (!valid_arglist)
4298 return NULL_RTX;
4299
4300 /* Compute the argument. */
4301 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4302
4303 /* Compute the alignment. */
4304 align = (alloca_with_align
4305 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4306 : BIGGEST_ALIGNMENT);
4307
4308 /* Allocate the desired space. */
4309 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4310 result = convert_memory_address (ptr_mode, result);
4311
4312 return result;
4313 }
4314
4315 /* Expand a call to bswap builtin in EXP.
4316 Return NULL_RTX if a normal call should be emitted rather than expanding the
4317 function in-line. If convenient, the result should be placed in TARGET.
4318 SUBTARGET may be used as the target for computing one of EXP's operands. */
4319
4320 static rtx
4321 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
4322 rtx subtarget)
4323 {
4324 tree arg;
4325 rtx op0;
4326
4327 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4328 return NULL_RTX;
4329
4330 arg = CALL_EXPR_ARG (exp, 0);
4331 op0 = expand_expr (arg,
4332 subtarget && GET_MODE (subtarget) == target_mode
4333 ? subtarget : NULL_RTX,
4334 target_mode, EXPAND_NORMAL);
4335 if (GET_MODE (op0) != target_mode)
4336 op0 = convert_to_mode (target_mode, op0, 1);
4337
4338 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4339
4340 gcc_assert (target);
4341
4342 return convert_to_mode (target_mode, target, 1);
4343 }
4344
4345 /* Expand a call to a unary builtin in EXP.
4346 Return NULL_RTX if a normal call should be emitted rather than expanding the
4347 function in-line. If convenient, the result should be placed in TARGET.
4348 SUBTARGET may be used as the target for computing one of EXP's operands. */
4349
4350 static rtx
4351 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
4352 rtx subtarget, optab op_optab)
4353 {
4354 rtx op0;
4355
4356 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4357 return NULL_RTX;
4358
4359 /* Compute the argument. */
4360 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4361 (subtarget
4362 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4363 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4364 VOIDmode, EXPAND_NORMAL);
4365 /* Compute op, into TARGET if possible.
4366 Set TARGET to wherever the result comes back. */
4367 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4368 op_optab, op0, target, op_optab != clrsb_optab);
4369 gcc_assert (target);
4370
4371 return convert_to_mode (target_mode, target, 0);
4372 }
4373
4374 /* Expand a call to __builtin_expect. We just return our argument
4375 as the builtin_expect semantic should've been already executed by
4376 tree branch prediction pass. */
4377
4378 static rtx
4379 expand_builtin_expect (tree exp, rtx target)
4380 {
4381 tree arg;
4382
4383 if (call_expr_nargs (exp) < 2)
4384 return const0_rtx;
4385 arg = CALL_EXPR_ARG (exp, 0);
4386
4387 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4388 /* When guessing was done, the hints should be already stripped away. */
4389 gcc_assert (!flag_guess_branch_prob
4390 || optimize == 0 || seen_error ());
4391 return target;
4392 }
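
/* Illustrative sketch (not compiled code): by this point a use such as

     if (__builtin_expect (x == 0, 0))
       rare_path ();

   has already had its probability hint consumed by the tree-level branch
   predictor, so expansion simply yields the value of the first argument
   (here the value of x == 0); rare_path is only a placeholder name.  */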
4393
4394 /* Expand a call to __builtin_assume_aligned. We just return our first
4395 argument as the builtin_assume_aligned semantics should have already
4396 been applied by CCP. */
4397
4398 static rtx
4399 expand_builtin_assume_aligned (tree exp, rtx target)
4400 {
4401 if (call_expr_nargs (exp) < 2)
4402 return const0_rtx;
4403 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4404 EXPAND_NORMAL);
4405 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4406 && (call_expr_nargs (exp) < 3
4407 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4408 return target;
4409 }
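
/* Illustrative sketch (not compiled code): for a use such as

     p = __builtin_assume_aligned (q, 16);

   CCP has already used the call to refine the known alignment of P, so
   expansion simply returns Q; the alignment (and optional misalignment)
   arguments are only required to be free of side effects here.  */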
4410
4411 void
4412 expand_builtin_trap (void)
4413 {
4414 if (targetm.have_trap ())
4415 {
4416 rtx_insn *insn = emit_insn (targetm.gen_trap ());
4417 /* For trap insns, when not accumulating outgoing args, force a
4418 REG_ARGS_SIZE note to prevent crossjumping of calls with
4419 different arg sizes. */
4420 if (!ACCUMULATE_OUTGOING_ARGS)
4421 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4422 }
4423 else
4424 {
4425 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
4426 tree call_expr = build_call_expr (fn, 0);
4427 expand_call (call_expr, NULL_RTX, false);
4428 }
4429
4430 emit_barrier ();
4431 }
4432
4433 /* Expand a call to __builtin_unreachable. We do nothing except emit
4434 a barrier saying that control flow will not pass here.
4435
4436 It is the responsibility of the program being compiled to ensure
4437 that control flow never reaches __builtin_unreachable. */
4438 static void
4439 expand_builtin_unreachable (void)
4440 {
4441 emit_barrier ();
4442 }
4443
4444 /* Expand EXP, a call to fabs, fabsf or fabsl.
4445 Return NULL_RTX if a normal call should be emitted rather than expanding
4446 the function inline. If convenient, the result should be placed
4447 in TARGET. SUBTARGET may be used as the target for computing
4448 the operand. */
4449
4450 static rtx
4451 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4452 {
4453 machine_mode mode;
4454 tree arg;
4455 rtx op0;
4456
4457 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4458 return NULL_RTX;
4459
4460 arg = CALL_EXPR_ARG (exp, 0);
4461 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4462 mode = TYPE_MODE (TREE_TYPE (arg));
4463 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4464 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4465 }
4466
4467 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4468 Return NULL_RTX if a normal call should be emitted rather than expanding the
4469 function inline. If convenient, the result should be placed in TARGET.
4470 SUBTARGET may be used as the target for computing the operand. */
4471
4472 static rtx
4473 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4474 {
4475 rtx op0, op1;
4476 tree arg;
4477
4478 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4479 return NULL_RTX;
4480
4481 arg = CALL_EXPR_ARG (exp, 0);
4482 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4483
4484 arg = CALL_EXPR_ARG (exp, 1);
4485 op1 = expand_normal (arg);
4486
4487 return expand_copysign (op0, op1, target);
4488 }
4489
4490 /* Expand a call to __builtin___clear_cache. */
4491
4492 static rtx
4493 expand_builtin___clear_cache (tree exp)
4494 {
4495 if (!targetm.code_for_clear_cache)
4496 {
4497 #ifdef CLEAR_INSN_CACHE
4498 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4499 does something. Just do the default expansion to a call to
4500 __clear_cache(). */
4501 return NULL_RTX;
4502 #else
4503 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4504 does nothing. There is no need to call it. Do nothing. */
4505 return const0_rtx;
4506 #endif /* CLEAR_INSN_CACHE */
4507 }
4508
4509 /* We have a "clear_cache" insn, and it will handle everything. */
4510 tree begin, end;
4511 rtx begin_rtx, end_rtx;
4512
4513 /* We must not expand to a library call. If we did, any
4514 fallback library function in libgcc that might contain a call to
4515 __builtin___clear_cache() would recurse infinitely. */
4516 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4517 {
4518 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4519 return const0_rtx;
4520 }
4521
4522 if (targetm.have_clear_cache ())
4523 {
4524 struct expand_operand ops[2];
4525
4526 begin = CALL_EXPR_ARG (exp, 0);
4527 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4528
4529 end = CALL_EXPR_ARG (exp, 1);
4530 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4531
4532 create_address_operand (&ops[0], begin_rtx);
4533 create_address_operand (&ops[1], end_rtx);
4534 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
4535 return const0_rtx;
4536 }
4537 return const0_rtx;
4538 }
4539
4540 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4541
4542 static rtx
4543 round_trampoline_addr (rtx tramp)
4544 {
4545 rtx temp, addend, mask;
4546
4547 /* If we don't need too much alignment, we'll have been guaranteed
4548 proper alignment by get_trampoline_type. */
4549 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4550 return tramp;
4551
4552 /* Round address up to desired boundary. */
4553 temp = gen_reg_rtx (Pmode);
4554 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4555 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4556
4557 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4558 temp, 0, OPTAB_LIB_WIDEN);
4559 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4560 temp, 0, OPTAB_LIB_WIDEN);
4561
4562 return tramp;
4563 }
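
/* Illustrative sketch (not compiled code): the two binops above implement
   the usual round-up-to-alignment idiom

     rounded = (addr + align - 1) & -align;

   e.g. with align == 16 an address of 0x1003 becomes 0x1010.  */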
4564
4565 static rtx
4566 expand_builtin_init_trampoline (tree exp, bool onstack)
4567 {
4568 tree t_tramp, t_func, t_chain;
4569 rtx m_tramp, r_tramp, r_chain, tmp;
4570
4571 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4572 POINTER_TYPE, VOID_TYPE))
4573 return NULL_RTX;
4574
4575 t_tramp = CALL_EXPR_ARG (exp, 0);
4576 t_func = CALL_EXPR_ARG (exp, 1);
4577 t_chain = CALL_EXPR_ARG (exp, 2);
4578
4579 r_tramp = expand_normal (t_tramp);
4580 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4581 MEM_NOTRAP_P (m_tramp) = 1;
4582
4583 /* If ONSTACK, the TRAMP argument should be the address of a field
4584 within the local function's FRAME decl. Either way, let's see if
4585 we can fill in the MEM_ATTRs for this memory. */
4586 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4587 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
4588
4589 /* Creator of a heap trampoline is responsible for making sure the
4590 address is aligned to at least STACK_BOUNDARY. Normally malloc
4591 will ensure this anyhow. */
4592 tmp = round_trampoline_addr (r_tramp);
4593 if (tmp != r_tramp)
4594 {
4595 m_tramp = change_address (m_tramp, BLKmode, tmp);
4596 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4597 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4598 }
4599
4600 /* The FUNC argument should be the address of the nested function.
4601 Extract the actual function decl to pass to the hook. */
4602 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4603 t_func = TREE_OPERAND (t_func, 0);
4604 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4605
4606 r_chain = expand_normal (t_chain);
4607
4608 /* Generate insns to initialize the trampoline. */
4609 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4610
4611 if (onstack)
4612 {
4613 trampolines_created = 1;
4614
4615 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4616 "trampoline generated for nested function %qD", t_func);
4617 }
4618
4619 return const0_rtx;
4620 }
4621
4622 static rtx
4623 expand_builtin_adjust_trampoline (tree exp)
4624 {
4625 rtx tramp;
4626
4627 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4628 return NULL_RTX;
4629
4630 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4631 tramp = round_trampoline_addr (tramp);
4632 if (targetm.calls.trampoline_adjust_address)
4633 tramp = targetm.calls.trampoline_adjust_address (tramp);
4634
4635 return tramp;
4636 }
4637
4638 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4639 function. The function first checks whether the back end provides
4640 an insn to implement signbit for the respective mode. If not, it
4641 checks whether the floating point format of the value is such that
4642 the sign bit can be extracted. If that is not the case, error out.
4643 EXP is the expression that is a call to the builtin function; if
4644 convenient, the result should be placed in TARGET. */
4645 static rtx
4646 expand_builtin_signbit (tree exp, rtx target)
4647 {
4648 const struct real_format *fmt;
4649 machine_mode fmode, imode, rmode;
4650 tree arg;
4651 int word, bitpos;
4652 enum insn_code icode;
4653 rtx temp;
4654 location_t loc = EXPR_LOCATION (exp);
4655
4656 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4657 return NULL_RTX;
4658
4659 arg = CALL_EXPR_ARG (exp, 0);
4660 fmode = TYPE_MODE (TREE_TYPE (arg));
4661 rmode = TYPE_MODE (TREE_TYPE (exp));
4662 fmt = REAL_MODE_FORMAT (fmode);
4663
4664 arg = builtin_save_expr (arg);
4665
4666 /* Expand the argument yielding a RTX expression. */
4667 temp = expand_normal (arg);
4668
4669 /* Check if the back end provides an insn that handles signbit for the
4670 argument's mode. */
4671 icode = optab_handler (signbit_optab, fmode);
4672 if (icode != CODE_FOR_nothing)
4673 {
4674 rtx_insn *last = get_last_insn ();
4675 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4676 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4677 return target;
4678 delete_insns_since (last);
4679 }
4680
4681 /* For floating point formats without a sign bit, implement signbit
4682 as "ARG < 0.0". */
4683 bitpos = fmt->signbit_ro;
4684 if (bitpos < 0)
4685 {
4686 /* But we can't do this if the format supports signed zero. */
4687 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
4688
4689 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
4690 build_real (TREE_TYPE (arg), dconst0));
4691 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4692 }
4693
4694 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
4695 {
4696 imode = int_mode_for_mode (fmode);
4697 gcc_assert (imode != BLKmode);
4698 temp = gen_lowpart (imode, temp);
4699 }
4700 else
4701 {
4702 imode = word_mode;
4703 /* Handle targets with different FP word orders. */
4704 if (FLOAT_WORDS_BIG_ENDIAN)
4705 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
4706 else
4707 word = bitpos / BITS_PER_WORD;
4708 temp = operand_subword_force (temp, word, fmode);
4709 bitpos = bitpos % BITS_PER_WORD;
4710 }
4711
4712 /* Force the intermediate word_mode (or narrower) result into a
4713 register. This avoids attempting to create paradoxical SUBREGs
4714 of floating point modes below. */
4715 temp = force_reg (imode, temp);
4716
4717 /* If the bitpos is within the "result mode" lowpart, the operation
4718 can be implemented with a single bitwise AND. Otherwise, we need
4719 a right shift and an AND. */
4720
4721 if (bitpos < GET_MODE_BITSIZE (rmode))
4722 {
4723 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
4724
4725 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
4726 temp = gen_lowpart (rmode, temp);
4727 temp = expand_binop (rmode, and_optab, temp,
4728 immed_wide_int_const (mask, rmode),
4729 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4730 }
4731 else
4732 {
4733 /* Perform a logical right shift to place the signbit in the least
4734 significant bit, then truncate the result to the desired mode
4735 and mask just this bit. */
4736 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
4737 temp = gen_lowpart (rmode, temp);
4738 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
4739 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4740 }
4741
4742 return temp;
4743 }
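
/* Illustrative sketch (not compiled code): for IEEE double on a typical
   64-bit target, bitpos is 63 and the fallback path behaves like

     uint64_t bits;
     memcpy (&bits, &x, sizeof bits);
     return (int) ((bits >> 63) & 1);

   i.e. shift the sign bit down to bit 0 and mask it; for float, bitpos 31
   fits in the result mode and a single AND with 0x80000000 suffices.  */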
4744
4745 /* Expand fork or exec calls. TARGET is the desired target of the
4746 call. EXP is the call. FN is the
4747 identifier of the actual function. IGNORE is nonzero if the
4748 value is to be ignored. */
4749
4750 static rtx
4751 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
4752 {
4753 tree id, decl;
4754 tree call;
4755
4756 /* If we are not profiling, just call the function. */
4757 if (!profile_arc_flag)
4758 return NULL_RTX;
4759
4760 /* Otherwise call the wrapper. This should be equivalent for the rest of
4761 the compiler, so the code does not diverge, and the wrapper may run the
4762 code necessary for keeping the profiling sane. */
4763
4764 switch (DECL_FUNCTION_CODE (fn))
4765 {
4766 case BUILT_IN_FORK:
4767 id = get_identifier ("__gcov_fork");
4768 break;
4769
4770 case BUILT_IN_EXECL:
4771 id = get_identifier ("__gcov_execl");
4772 break;
4773
4774 case BUILT_IN_EXECV:
4775 id = get_identifier ("__gcov_execv");
4776 break;
4777
4778 case BUILT_IN_EXECLP:
4779 id = get_identifier ("__gcov_execlp");
4780 break;
4781
4782 case BUILT_IN_EXECLE:
4783 id = get_identifier ("__gcov_execle");
4784 break;
4785
4786 case BUILT_IN_EXECVP:
4787 id = get_identifier ("__gcov_execvp");
4788 break;
4789
4790 case BUILT_IN_EXECVE:
4791 id = get_identifier ("__gcov_execve");
4792 break;
4793
4794 default:
4795 gcc_unreachable ();
4796 }
4797
4798 decl = build_decl (DECL_SOURCE_LOCATION (fn),
4799 FUNCTION_DECL, id, TREE_TYPE (fn));
4800 DECL_EXTERNAL (decl) = 1;
4801 TREE_PUBLIC (decl) = 1;
4802 DECL_ARTIFICIAL (decl) = 1;
4803 TREE_NOTHROW (decl) = 1;
4804 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
4805 DECL_VISIBILITY_SPECIFIED (decl) = 1;
4806 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
4807 return expand_call (call, target, ignore);
4808 }
4809
4810
4811 \f
4812 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
4813 the pointer in these functions is void*, the tree optimizers may remove
4814 casts. The mode computed in expand_builtin isn't reliable either, due
4815 to __sync_bool_compare_and_swap.
4816
4817 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
4818 group of builtins. This gives us log2 of the mode size. */
4819
4820 static inline machine_mode
4821 get_builtin_sync_mode (int fcode_diff)
4822 {
4823 /* The size is not negotiable, so ask not to get BLKmode in return
4824 if the target indicates that a smaller size would be better. */
4825 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
4826 }
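
/* Illustrative sketch (not compiled code): the _1/_2/_4/_8/_16 builtin
   variants have consecutive function codes, so FCODE_DIFF is log2 of the
   access size in bytes, e.g.

     __sync_fetch_and_add_4   ->  fcode_diff == 2  ->  4-byte integer mode
     __sync_fetch_and_add_8   ->  fcode_diff == 3  ->  8-byte integer mode  */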
4827
4828 /* Expand the memory expression LOC and return the appropriate memory operand
4829 for the builtin_sync operations. */
4830
4831 static rtx
4832 get_builtin_sync_mem (tree loc, machine_mode mode)
4833 {
4834 rtx addr, mem;
4835
4836 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
4837 addr = convert_memory_address (Pmode, addr);
4838
4839 /* Note that we explicitly do not want any alias information for this
4840 memory, so that we kill all other live memories. Otherwise we don't
4841 satisfy the full barrier semantics of the intrinsic. */
4842 mem = validize_mem (gen_rtx_MEM (mode, addr));
4843
4844 /* The alignment needs to be at least that of the mode. */
4845 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
4846 get_pointer_alignment (loc)));
4847 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
4848 MEM_VOLATILE_P (mem) = 1;
4849
4850 return mem;
4851 }
4852
4853 /* Make sure an argument is in the right mode.
4854 EXP is the tree argument.
4855 MODE is the mode it should be in. */
4856
4857 static rtx
4858 expand_expr_force_mode (tree exp, machine_mode mode)
4859 {
4860 rtx val;
4861 machine_mode old_mode;
4862
4863 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
4864 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
4865 of CONST_INTs, where we know the old_mode only from the call argument. */
4866
4867 old_mode = GET_MODE (val);
4868 if (old_mode == VOIDmode)
4869 old_mode = TYPE_MODE (TREE_TYPE (exp));
4870 val = convert_modes (mode, old_mode, val, 1);
4871 return val;
4872 }
4873
4874
4875 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
4876 EXP is the CALL_EXPR. CODE is the rtx code
4877 that corresponds to the arithmetic or logical operation from the name;
4878 an exception here is that NOT actually means NAND. TARGET is an optional
4879 place for us to store the results; AFTER is true if this is the
4880 fetch_and_xxx form. */
4881
4882 static rtx
4883 expand_builtin_sync_operation (machine_mode mode, tree exp,
4884 enum rtx_code code, bool after,
4885 rtx target)
4886 {
4887 rtx val, mem;
4888 location_t loc = EXPR_LOCATION (exp);
4889
4890 if (code == NOT && warn_sync_nand)
4891 {
4892 tree fndecl = get_callee_fndecl (exp);
4893 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
4894
4895 static bool warned_f_a_n, warned_n_a_f;
4896
4897 switch (fcode)
4898 {
4899 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
4900 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
4901 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
4902 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
4903 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
4904 if (warned_f_a_n)
4905 break;
4906
4907 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
4908 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
4909 warned_f_a_n = true;
4910 break;
4911
4912 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
4913 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
4914 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
4915 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
4916 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
4917 if (warned_n_a_f)
4918 break;
4919
4920 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
4921 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
4922 warned_n_a_f = true;
4923 break;
4924
4925 default:
4926 gcc_unreachable ();
4927 }
4928 }
4929
4930 /* Expand the operands. */
4931 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
4932 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
4933
4934 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
4935 after);
4936 }
4937
4938 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
4939 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
4940 true if this is the boolean form. TARGET is a place for us to store the
4941 results; this is NOT optional if IS_BOOL is true. */
4942
4943 static rtx
4944 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
4945 bool is_bool, rtx target)
4946 {
4947 rtx old_val, new_val, mem;
4948 rtx *pbool, *poval;
4949
4950 /* Expand the operands. */
4951 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
4952 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
4953 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
4954
4955 pbool = poval = NULL;
4956 if (target != const0_rtx)
4957 {
4958 if (is_bool)
4959 pbool = &target;
4960 else
4961 poval = &target;
4962 }
4963 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
4964 false, MEMMODEL_SYNC_SEQ_CST,
4965 MEMMODEL_SYNC_SEQ_CST))
4966 return NULL_RTX;
4967
4968 return target;
4969 }
4970
4971 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
4972 general form is actually an atomic exchange, and some targets only
4973 support a reduced form with the second argument being a constant 1.
4974 EXP is the CALL_EXPR; TARGET is an optional place for us to store
4975 the results. */
4976
4977 static rtx
4978 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
4979 rtx target)
4980 {
4981 rtx val, mem;
4982
4983 /* Expand the operands. */
4984 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
4985 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
4986
4987 return expand_sync_lock_test_and_set (target, mem, val);
4988 }
4989
4990 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
4991
4992 static void
4993 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
4994 {
4995 rtx mem;
4996
4997 /* Expand the operands. */
4998 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
4999
5000 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
5001 }
5002
5003 /* Given an integer representing an ``enum memmodel'', verify its
5004 correctness and return the memory model enum. */
5005
5006 static enum memmodel
5007 get_memmodel (tree exp)
5008 {
5009 rtx op;
5010 unsigned HOST_WIDE_INT val;
5011 source_location loc
5012 = expansion_point_location_if_in_system_header (input_location);
5013
5014 /* If the parameter is not a constant, it's a run time value so we'll just
5015 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5016 if (TREE_CODE (exp) != INTEGER_CST)
5017 return MEMMODEL_SEQ_CST;
5018
5019 op = expand_normal (exp);
5020
5021 val = INTVAL (op);
5022 if (targetm.memmodel_check)
5023 val = targetm.memmodel_check (val);
5024 else if (val & ~MEMMODEL_MASK)
5025 {
5026 warning_at (loc, OPT_Winvalid_memory_model,
5027 "unknown architecture specifier in memory model to builtin");
5028 return MEMMODEL_SEQ_CST;
5029 }
5030
5031 /* Should never see a user explicit SYNC memory model, so >= LAST works. */
5032 if (memmodel_base (val) >= MEMMODEL_LAST)
5033 {
5034 warning_at (loc, OPT_Winvalid_memory_model,
5035 "invalid memory model argument to builtin");
5036 return MEMMODEL_SEQ_CST;
5037 }
5038
5039 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5040 be conservative and promote consume to acquire. */
5041 if (val == MEMMODEL_CONSUME)
5042 val = MEMMODEL_ACQUIRE;
5043
5044 return (enum memmodel) val;
5045 }
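
/* Illustrative sketch (not compiled code): for a call written as

     __atomic_load_n (p, __ATOMIC_CONSUME);

   the constant consume model is promoted to acquire here (see PR 59448
   above), while a model argument that is not a compile-time constant is
   conservatively treated as MEMMODEL_SEQ_CST.  */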
5046
5047 /* Expand the __atomic_exchange intrinsic:
5048 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5049 EXP is the CALL_EXPR.
5050 TARGET is an optional place for us to store the results. */
5051
5052 static rtx
5053 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5054 {
5055 rtx val, mem;
5056 enum memmodel model;
5057
5058 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5059
5060 if (!flag_inline_atomics)
5061 return NULL_RTX;
5062
5063 /* Expand the operands. */
5064 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5065 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5066
5067 return expand_atomic_exchange (target, mem, val, model);
5068 }
5069
5070 /* Expand the __atomic_compare_exchange intrinsic:
5071 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5072 TYPE desired, BOOL weak,
5073 enum memmodel success,
5074 enum memmodel failure)
5075 EXP is the CALL_EXPR.
5076 TARGET is an optional place for us to store the results. */
5077
5078 static rtx
5079 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5080 rtx target)
5081 {
5082 rtx expect, desired, mem, oldval;
5083 rtx_code_label *label;
5084 enum memmodel success, failure;
5085 tree weak;
5086 bool is_weak;
5087 source_location loc
5088 = expansion_point_location_if_in_system_header (input_location);
5089
5090 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5091 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5092
5093 if (failure > success)
5094 {
5095 warning_at (loc, OPT_Winvalid_memory_model,
5096 "failure memory model cannot be stronger than success "
5097 "memory model for %<__atomic_compare_exchange%>");
5098 success = MEMMODEL_SEQ_CST;
5099 }
5100
5101 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5102 {
5103 warning_at (loc, OPT_Winvalid_memory_model,
5104 "invalid failure memory model for "
5105 "%<__atomic_compare_exchange%>");
5106 failure = MEMMODEL_SEQ_CST;
5107 success = MEMMODEL_SEQ_CST;
5108 }
5109
5110
5111 if (!flag_inline_atomics)
5112 return NULL_RTX;
5113
5114 /* Expand the operands. */
5115 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5116
5117 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5118 expect = convert_memory_address (Pmode, expect);
5119 expect = gen_rtx_MEM (mode, expect);
5120 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5121
5122 weak = CALL_EXPR_ARG (exp, 3);
5123 is_weak = false;
5124 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5125 is_weak = true;
5126
5127 if (target == const0_rtx)
5128 target = NULL;
5129
5130 /* Lest the rtl backend create a race condition with an improper store
5131 to memory, always create a new pseudo for OLDVAL. */
5132 oldval = NULL;
5133
5134 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5135 is_weak, success, failure))
5136 return NULL_RTX;
5137
5138 /* Conditionally store back to EXPECT, lest we create a race condition
5139 with an improper store to memory. */
5140 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5141 the normal case where EXPECT is totally private, i.e. a register. At
5142 which point the store can be unconditional. */
5143 label = gen_label_rtx ();
5144 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
5145 GET_MODE (target), 1, label);
5146 emit_move_insn (expect, oldval);
5147 emit_label (label);
5148
5149 return target;
5150 }
5151
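/* Illustrative sketch (not part of this source file): the user-level form of
   the compare-exchange semantics implemented above, in particular the
   write-back of the old value into *EXPECTED on failure, which is what the
   compare-and-jump around the store of OLDVAL implements.  The function name
   is made up for illustration.  */
#if 0
static _Bool
example_compare_exchange (int *obj, int *expected, int desired)
{
  return __atomic_compare_exchange_n (obj, expected, desired,
                                      /*weak=*/0,
                                      __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
}
#endif
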
5152 /* Helper function for expand_ifn_atomic_compare_exchange: expand an
5153 internal ATOMIC_COMPARE_EXCHANGE call into an __atomic_compare_exchange_N
5154 call. The weak parameter must be dropped to match the expected parameter
5155 list, and the expected argument is changed from a value into a pointer to
5156 a freshly allocated memory slot. */
5157
5158 static void
5159 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
5160 {
5161 unsigned int z;
5162 vec<tree, va_gc> *vec;
5163
5164 vec_alloc (vec, 5);
5165 vec->quick_push (gimple_call_arg (call, 0));
5166 tree expected = gimple_call_arg (call, 1);
5167 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
5168 TREE_TYPE (expected));
5169 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
5170 if (expd != x)
5171 emit_move_insn (x, expd);
5172 tree v = make_tree (TREE_TYPE (expected), x);
5173 vec->quick_push (build1 (ADDR_EXPR,
5174 build_pointer_type (TREE_TYPE (expected)), v));
5175 vec->quick_push (gimple_call_arg (call, 2));
5176 /* Skip the boolean weak parameter. */
5177 for (z = 4; z < 6; z++)
5178 vec->quick_push (gimple_call_arg (call, z));
5179 built_in_function fncode
5180 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
5181 + exact_log2 (GET_MODE_SIZE (mode)));
5182 tree fndecl = builtin_decl_explicit (fncode);
5183 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
5184 fndecl);
5185 tree exp = build_call_vec (boolean_type_node, fn, vec);
5186 tree lhs = gimple_call_lhs (call);
5187 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
5188 if (lhs)
5189 {
5190 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
5191 if (GET_MODE (boolret) != mode)
5192 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
5193 x = force_reg (mode, x);
5194 write_complex_part (target, boolret, true);
5195 write_complex_part (target, x, false);
5196 }
5197 }
5198
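/* Illustrative sketch (not part of this source file): for a 4-byte object,
   the rewrite above takes the internal call

       .ATOMIC_COMPARE_EXCHANGE (obj, expected, desired, 4 | weak << 8,
                                 success_order, failure_order)

   and builds the five-argument library form, roughly

       __atomic_compare_exchange_4 (obj, &expected_slot, desired,
                                    success_order, failure_order)

   where expected_slot is a stack temporary initialized from EXPECTED; the
   lhs, if any, receives the pair {bool result, expected_slot} as a complex
   value.  All names other than the builtins are made up for illustration.  */
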
5199 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
5200
5201 void
5202 expand_ifn_atomic_compare_exchange (gcall *call)
5203 {
5204 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
5205 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
5206 machine_mode mode = mode_for_size (BITS_PER_UNIT * size, MODE_INT, 0);
5207 rtx expect, desired, mem, oldval, boolret;
5208 enum memmodel success, failure;
5209 tree lhs;
5210 bool is_weak;
5211 source_location loc
5212 = expansion_point_location_if_in_system_header (gimple_location (call));
5213
5214 success = get_memmodel (gimple_call_arg (call, 4));
5215 failure = get_memmodel (gimple_call_arg (call, 5));
5216
5217 if (failure > success)
5218 {
5219 warning_at (loc, OPT_Winvalid_memory_model,
5220 "failure memory model cannot be stronger than success "
5221 "memory model for %<__atomic_compare_exchange%>");
5222 success = MEMMODEL_SEQ_CST;
5223 }
5224
5225 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5226 {
5227 warning_at (loc, OPT_Winvalid_memory_model,
5228 "invalid failure memory model for "
5229 "%<__atomic_compare_exchange%>");
5230 failure = MEMMODEL_SEQ_CST;
5231 success = MEMMODEL_SEQ_CST;
5232 }
5233
5234 if (!flag_inline_atomics)
5235 {
5236 expand_ifn_atomic_compare_exchange_into_call (call, mode);
5237 return;
5238 }
5239
5240 /* Expand the operands. */
5241 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
5242
5243 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
5244 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
5245
5246 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
5247
5248 boolret = NULL;
5249 oldval = NULL;
5250
5251 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
5252 is_weak, success, failure))
5253 {
5254 expand_ifn_atomic_compare_exchange_into_call (call, mode);
5255 return;
5256 }
5257
5258 lhs = gimple_call_lhs (call);
5259 if (lhs)
5260 {
5261 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
5262 if (GET_MODE (boolret) != mode)
5263 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
5264 write_complex_part (target, boolret, true);
5265 write_complex_part (target, oldval, false);
5266 }
5267 }
5268
5269 /* Expand the __atomic_load intrinsic:
5270 TYPE __atomic_load (TYPE *object, enum memmodel)
5271 EXP is the CALL_EXPR.
5272 TARGET is an optional place for us to store the results. */
5273
5274 static rtx
5275 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
5276 {
5277 rtx mem;
5278 enum memmodel model;
5279
5280 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5281 if (is_mm_release (model) || is_mm_acq_rel (model))
5282 {
5283 source_location loc
5284 = expansion_point_location_if_in_system_header (input_location);
5285 warning_at (loc, OPT_Winvalid_memory_model,
5286 "invalid memory model for %<__atomic_load%>");
5287 model = MEMMODEL_SEQ_CST;
5288 }
5289
5290 if (!flag_inline_atomics)
5291 return NULL_RTX;
5292
5293 /* Expand the operand. */
5294 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5295
5296 return expand_atomic_load (target, mem, model);
5297 }
5298
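/* Illustrative sketch (not part of this source file): memory models for an
   atomic load; the function name is made up for illustration.  */
#if 0
static int
example_atomic_load (int *p)
{
  /* RELEASE and ACQ_REL are not valid for a load; the expander above warns
     and uses SEQ_CST instead.  The remaining models are accepted, with
     CONSUME promoted to ACQUIRE by get_memmodel.  */
  return __atomic_load_n (p, __ATOMIC_ACQUIRE);
}
#endif
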
5299
5300 /* Expand the __atomic_store intrinsic:
5301 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5302 EXP is the CALL_EXPR.
5303 TARGET is an optional place for us to store the results. */
5304
5305 static rtx
5306 expand_builtin_atomic_store (machine_mode mode, tree exp)
5307 {
5308 rtx mem, val;
5309 enum memmodel model;
5310
5311 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5312 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
5313 || is_mm_release (model)))
5314 {
5315 source_location loc
5316 = expansion_point_location_if_in_system_header (input_location);
5317 warning_at (loc, OPT_Winvalid_memory_model,
5318 "invalid memory model for %<__atomic_store%>");
5319 model = MEMMODEL_SEQ_CST;
5320 }
5321
5322 if (!flag_inline_atomics)
5323 return NULL_RTX;
5324
5325 /* Expand the operands. */
5326 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5327 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5328
5329 return expand_atomic_store (mem, val, model, false);
5330 }
5331
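/* Illustrative sketch (not part of this source file): only RELAXED, RELEASE
   and SEQ_CST are valid for an atomic store; anything else is warned about
   and demoted to SEQ_CST by the expander above.  The function name is made
   up for illustration.  */
#if 0
static void
example_atomic_store (int *p, int v)
{
  __atomic_store_n (p, v, __ATOMIC_RELEASE);
}
#endif
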
5332 /* Expand the __atomic_fetch_XXX intrinsic:
5333 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5334 EXP is the CALL_EXPR.
5335 TARGET is an optional place for us to store the results.
5336 CODE is the operation, PLUS, MINUS, IOR, AND, XOR, or NOT.
5337 FETCH_AFTER is true if returning the result of the operation.
5338 FETCH_AFTER is false if returning the value before the operation.
5339 IGNORE is true if the result is not used.
5340 EXT_CALL is the correct builtin for an external call if this cannot be
5341 resolved to an instruction sequence. */
5342
5343 static rtx
5344 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
5345 enum rtx_code code, bool fetch_after,
5346 bool ignore, enum built_in_function ext_call)
5347 {
5348 rtx val, mem, ret;
5349 enum memmodel model;
5350 tree fndecl;
5351 tree addr;
5352
5353 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5354
5355 /* Expand the operands. */
5356 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5357 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5358
5359 /* Only try generating instructions if inlining is turned on. */
5360 if (flag_inline_atomics)
5361 {
5362 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5363 if (ret)
5364 return ret;
5365 }
5366
5367 /* If the library call does not need to be redirected to a different routine, let the normal call be emitted. */
5368 if (ext_call == BUILT_IN_NONE)
5369 return NULL_RTX;
5370
5371 /* Change the call to the specified function. */
5372 fndecl = get_callee_fndecl (exp);
5373 addr = CALL_EXPR_FN (exp);
5374 STRIP_NOPS (addr);
5375
5376 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5377 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
5378
5379 /* Expand the call here so we can emit trailing code. */
5380 ret = expand_call (exp, target, ignore);
5381
5382 /* Put the original function back, in case it matters later. */
5383 TREE_OPERAND (addr, 0) = fndecl;
5384
5385 /* Then issue the arithmetic correction to return the right result. */
5386 if (!ignore)
5387 {
5388 if (code == NOT)
5389 {
5390 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5391 OPTAB_LIB_WIDEN);
5392 ret = expand_simple_unop (mode, NOT, ret, target, true);
5393 }
5394 else
5395 ret = expand_simple_binop (mode, code, ret, val, target, true,
5396 OPTAB_LIB_WIDEN);
5397 }
5398 return ret;
5399 }
5400
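/* Illustrative sketch (not part of this source file) of the trailing
   arithmetic correction emitted above when an OP_fetch builtin has to fall
   back to the corresponding fetch_OP library routine; the function names are
   made up for illustration.  */
#if 0
static int
example_add_fetch (int *p, int v)
{
  int old = __atomic_fetch_add (p, v, __ATOMIC_SEQ_CST);
  return old + v;            /* PLUS correction for __atomic_add_fetch.  */
}

static int
example_nand_fetch (int *p, int v)
{
  int old = __atomic_fetch_nand (p, v, __ATOMIC_SEQ_CST);
  return ~(old & v);         /* AND-then-NOT correction for nand_fetch.  */
}
#endif
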
5401 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
5402
5403 void
5404 expand_ifn_atomic_bit_test_and (gcall *call)
5405 {
5406 tree ptr = gimple_call_arg (call, 0);
5407 tree bit = gimple_call_arg (call, 1);
5408 tree flag = gimple_call_arg (call, 2);
5409 tree lhs = gimple_call_lhs (call);
5410 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
5411 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
5412 enum rtx_code code;
5413 optab optab;
5414 struct expand_operand ops[5];
5415
5416 gcc_assert (flag_inline_atomics);
5417
5418 if (gimple_call_num_args (call) == 4)
5419 model = get_memmodel (gimple_call_arg (call, 3));
5420
5421 rtx mem = get_builtin_sync_mem (ptr, mode);
5422 rtx val = expand_expr_force_mode (bit, mode);
5423
5424 switch (gimple_call_internal_fn (call))
5425 {
5426 case IFN_ATOMIC_BIT_TEST_AND_SET:
5427 code = IOR;
5428 optab = atomic_bit_test_and_set_optab;
5429 break;
5430 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
5431 code = XOR;
5432 optab = atomic_bit_test_and_complement_optab;
5433 break;
5434 case IFN_ATOMIC_BIT_TEST_AND_RESET:
5435 code = AND;
5436 optab = atomic_bit_test_and_reset_optab;
5437 break;
5438 default:
5439 gcc_unreachable ();
5440 }
5441
5442 if (lhs == NULL_TREE)
5443 {
5444 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
5445 val, NULL_RTX, true, OPTAB_DIRECT);
5446 if (code == AND)
5447 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
5448 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
5449 return;
5450 }
5451
5452 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
5453 enum insn_code icode = direct_optab_handler (optab, mode);
5454 gcc_assert (icode != CODE_FOR_nothing);
5455 create_output_operand (&ops[0], target, mode);
5456 create_fixed_operand (&ops[1], mem);
5457 create_convert_operand_to (&ops[2], val, mode, true);
5458 create_integer_operand (&ops[3], model);
5459 create_integer_operand (&ops[4], integer_onep (flag));
5460 if (maybe_expand_insn (icode, 5, ops))
5461 return;
5462
5463 rtx bitval = val;
5464 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
5465 val, NULL_RTX, true, OPTAB_DIRECT);
5466 rtx maskval = val;
5467 if (code == AND)
5468 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
5469 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
5470 code, model, false);
5471 if (integer_onep (flag))
5472 {
5473 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
5474 NULL_RTX, true, OPTAB_DIRECT);
5475 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
5476 true, OPTAB_DIRECT);
5477 }
5478 else
5479 result = expand_simple_binop (mode, AND, result, maskval, target, true,
5480 OPTAB_DIRECT);
5481 if (result != target)
5482 emit_move_insn (target, result);
5483 }
5484
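/* Illustrative sketch (not part of this source file): the generic fallback
   above is equivalent to the following user-level sequence for the "set"
   flavor when the result is reduced to a 0/1 flag; the function name is made
   up for illustration.  */
#if 0
static int
example_bit_test_and_set (unsigned int *p, unsigned int bit)
{
  unsigned int mask = 1u << bit;
  unsigned int old = __atomic_fetch_or (p, mask, __ATOMIC_SEQ_CST);
  return (old >> bit) & 1;   /* Non-flag callers get old & mask instead.  */
}
#endif
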
5485 /* Expand an atomic clear operation.
5486 void __atomic_clear (BOOL *obj, enum memmodel)
5487 EXP is the call expression. */
5488
5489 static rtx
5490 expand_builtin_atomic_clear (tree exp)
5491 {
5492 machine_mode mode;
5493 rtx mem, ret;
5494 enum memmodel model;
5495
5496 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5497 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5498 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5499
5500 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
5501 {
5502 source_location loc
5503 = expansion_point_location_if_in_system_header (input_location);
5504 warning_at (loc, OPT_Winvalid_memory_model,
5505 "invalid memory model for %<__atomic_store%>");
5506 model = MEMMODEL_SEQ_CST;
5507 }
5508
5509 /* Try issuing an atomic store, allowing fallback to a __sync_lock_release
5510 pattern. The only way this can fail is if the bool type is larger than a
5511 word size; that is unlikely, but handle it anyway for completeness. In
5512 that case assume a single threaded model, since there is no atomic support
5513 anyway and no barriers are required, and emit a plain store of 0. */
5514 ret = expand_atomic_store (mem, const0_rtx, model, true);
5515 if (!ret)
5516 emit_move_insn (mem, const0_rtx);
5517 return const0_rtx;
5518 }
5519
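/* Illustrative sketch (not part of this source file): __atomic_clear behaves
   like an atomic store of 0; CONSUME, ACQUIRE and ACQ_REL orders are
   diagnosed above and demoted to SEQ_CST.  The function name is made up for
   illustration.  */
#if 0
static void
example_atomic_clear (_Bool *flag)
{
  __atomic_clear (flag, __ATOMIC_RELEASE);
}
#endif
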
5520 /* Expand an atomic test_and_set operation.
5521 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
5522 EXP is the call expression. */
5523
5524 static rtx
5525 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5526 {
5527 rtx mem;
5528 enum memmodel model;
5529 machine_mode mode;
5530
5531 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5532 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5533 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5534
5535 return expand_atomic_test_and_set (target, mem, model);
5536 }
5537
5538
5539 /* Return true if an object of size ARG0, optionally pointed to by ARG1, is always
5540 lock free on this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5541
5542 static tree
5543 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5544 {
5545 int size;
5546 machine_mode mode;
5547 unsigned int mode_align, type_align;
5548
5549 if (TREE_CODE (arg0) != INTEGER_CST)
5550 return NULL_TREE;
5551
5552 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5553 mode = mode_for_size (size, MODE_INT, 0);
5554 mode_align = GET_MODE_ALIGNMENT (mode);
5555
5556 if (TREE_CODE (arg1) == INTEGER_CST)
5557 {
5558 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
5559
5560 /* Either this argument is null, or it's a fake pointer encoding
5561 the alignment of the object. */
5562 val = least_bit_hwi (val);
5563 val *= BITS_PER_UNIT;
5564
5565 if (val == 0 || mode_align < val)
5566 type_align = mode_align;
5567 else
5568 type_align = val;
5569 }
5570 else
5571 {
5572 tree ttype = TREE_TYPE (arg1);
5573
5574 /* This function is usually invoked and folded immediately by the front
5575 end before anything else has a chance to look at it. The pointer
5576 parameter at this point is usually cast to a void *, so check for that
5577 and look past the cast. */
5578 if (CONVERT_EXPR_P (arg1)
5579 && POINTER_TYPE_P (ttype)
5580 && VOID_TYPE_P (TREE_TYPE (ttype))
5581 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
5582 arg1 = TREE_OPERAND (arg1, 0);
5583
5584 ttype = TREE_TYPE (arg1);
5585 gcc_assert (POINTER_TYPE_P (ttype));
5586
5587 /* Get the underlying type of the object. */
5588 ttype = TREE_TYPE (ttype);
5589 type_align = TYPE_ALIGN (ttype);
5590 }
5591
5592 /* If the object has smaller alignment, the lock free routines cannot
5593 be used. */
5594 if (type_align < mode_align)
5595 return boolean_false_node;
5596
5597 /* Check if a compare_and_swap pattern exists for the mode which represents
5598 the required size. The pattern is not allowed to fail, so the existence
5599 of the pattern indicates support is present. */
5600 if (can_compare_and_swap_p (mode, true))
5601 return boolean_true_node;
5602 else
5603 return boolean_false_node;
5604 }
5605
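/* Illustrative sketch (not part of this source file): because the front end
   folds this builtin immediately, the result is a compile-time constant; the
   size argument must itself be a constant, and the pointer argument (0 below)
   only contributes alignment information.  The function name is made up for
   illustration.  */
#if 0
static _Bool
example_always_lock_free (void)
{
  return __atomic_always_lock_free (sizeof (long), 0);
}
#endif
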
5606 /* Return const1_rtx if the parameters to call EXP represent an object which
5607 will always generate lock free instructions, and const0_rtx otherwise. The
5608 first argument represents the size of the object, and the second parameter
5609 is a pointer to the object itself. If NULL is passed for the object, then
5610 the result is based on typical alignment for an object of the specified
5611 size. */
5612
5613 static rtx
5614 expand_builtin_atomic_always_lock_free (tree exp)
5615 {
5616 tree size;
5617 tree arg0 = CALL_EXPR_ARG (exp, 0);
5618 tree arg1 = CALL_EXPR_ARG (exp, 1);
5619
5620 if (TREE_CODE (arg0) != INTEGER_CST)
5621 {
5622 error ("non-constant argument 1 to __atomic_always_lock_free");
5623 return const0_rtx;
5624 }
5625
5626 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5627 if (size == boolean_true_node)
5628 return const1_rtx;
5629 return const0_rtx;
5630 }
5631
5632 /* Return boolean_true_node if object ARG1 of size ARG0 is known to be lock
5633 free on this architecture, and NULL_TREE if that cannot be determined. */
5634
5635 static tree
5636 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5637 {
5638 if (!flag_inline_atomics)
5639 return NULL_TREE;
5640
5641 /* If it isn't always lock free, don't generate a result. */
5642 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5643 return boolean_true_node;
5644
5645 return NULL_TREE;
5646 }
5647
5648 /* Return const1_rtx if the parameters to call EXP describe an object which
5649 is known at compile time to always be lock free. The first argument
5650 represents the size of the object, and the second parameter is a pointer
5651 to the object itself. If NULL is passed for the object, then the result
5652 is based on typical alignment for an object of the specified size.
5653 Otherwise return NULL_RTX, so that a library call is emitted instead. */
5654
5655 static rtx
5656 expand_builtin_atomic_is_lock_free (tree exp)
5657 {
5658 tree size;
5659 tree arg0 = CALL_EXPR_ARG (exp, 0);
5660 tree arg1 = CALL_EXPR_ARG (exp, 1);
5661
5662 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5663 {
5664 error ("non-integer argument 1 to __atomic_is_lock_free");
5665 return NULL_RTX;
5666 }
5667
5668 if (!flag_inline_atomics)
5669 return NULL_RTX;
5670
5671 /* If the value is known at compile time, return the RTX for it. */
5672 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5673 if (size == boolean_true_node)
5674 return const1_rtx;
5675
5676 return NULL_RTX;
5677 }
5678
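/* Illustrative sketch (not part of this source file): unlike
   __atomic_always_lock_free, this form may fall back to a call to the
   __atomic_is_lock_free library routine (typically provided by libatomic)
   when the answer is not known at compile time.  The function name is made
   up for illustration.  */
#if 0
static _Bool
example_is_lock_free (void *obj)
{
  return __atomic_is_lock_free (16, obj);
}
#endif
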
5679 /* Expand the __atomic_thread_fence intrinsic:
5680 void __atomic_thread_fence (enum memmodel)
5681 EXP is the CALL_EXPR. */
5682
5683 static void
5684 expand_builtin_atomic_thread_fence (tree exp)
5685 {
5686 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5687 expand_mem_thread_fence (model);
5688 }
5689
5690 /* Expand the __atomic_signal_fence intrinsic:
5691 void __atomic_signal_fence (enum memmodel)
5692 EXP is the CALL_EXPR. */
5693
5694 static void
5695 expand_builtin_atomic_signal_fence (tree exp)
5696 {
5697 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5698 expand_mem_signal_fence (model);
5699 }
5700
5701 /* Expand the __sync_synchronize intrinsic. */
5702
5703 static void
5704 expand_builtin_sync_synchronize (void)
5705 {
5706 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
5707 }
5708
5709 static rtx
5710 expand_builtin_thread_pointer (tree exp, rtx target)
5711 {
5712 enum insn_code icode;
5713 if (!validate_arglist (exp, VOID_TYPE))
5714 return const0_rtx;
5715 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5716 if (icode != CODE_FOR_nothing)
5717 {
5718 struct expand_operand op;
5719 /* If the target is not suitable, create a new one. */
5720 if (target == NULL_RTX
5721 || !REG_P (target)
5722 || GET_MODE (target) != Pmode)
5723 target = gen_reg_rtx (Pmode);
5724 create_output_operand (&op, target, Pmode);
5725 expand_insn (icode, 1, &op);
5726 return target;
5727 }
5728 error ("__builtin_thread_pointer is not supported on this target");
5729 return const0_rtx;
5730 }
5731
5732 static void
5733 expand_builtin_set_thread_pointer (tree exp)
5734 {
5735 enum insn_code icode;
5736 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5737 return;
5738 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5739 if (icode != CODE_FOR_nothing)
5740 {
5741 struct expand_operand op;
5742 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5743 Pmode, EXPAND_NORMAL);
5744 create_input_operand (&op, val, Pmode);
5745 expand_insn (icode, 1, &op);
5746 return;
5747 }
5748 error ("__builtin_set_thread_pointer is not supported on this target");
5749 }
5750
5751 \f
5752 /* Emit code to restore the current value of stack. */
5753
5754 static void
5755 expand_stack_restore (tree var)
5756 {
5757 rtx_insn *prev;
5758 rtx sa = expand_normal (var);
5759
5760 sa = convert_memory_address (Pmode, sa);
5761
5762 prev = get_last_insn ();
5763 emit_stack_restore (SAVE_BLOCK, sa);
5764
5765 record_new_stack_level ();
5766
5767 fixup_args_size_notes (prev, get_last_insn (), 0);
5768 }
5769
5770 /* Emit code to save the current value of stack. */
5771
5772 static rtx
5773 expand_stack_save (void)
5774 {
5775 rtx ret = NULL_RTX;
5776
5777 emit_stack_save (SAVE_BLOCK, &ret);
5778 return ret;
5779 }
5780
5781
5782 /* Expand an expression EXP that calls a built-in function,
5783 with result going to TARGET if that's convenient
5784 (and in mode MODE if that's convenient).
5785 SUBTARGET may be used as the target for computing one of EXP's operands.
5786 IGNORE is nonzero if the value is to be ignored. */
5787
5788 rtx
5789 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
5790 int ignore)
5791 {
5792 tree fndecl = get_callee_fndecl (exp);
5793 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5794 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5795 int flags;
5796
5797 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5798 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5799
5800 /* When ASan is enabled, we don't want to expand some memory/string
5801 builtins; instead we rely on libsanitizer's hooks. This allows us to
5802 avoid redundant checks and to be sure that possible overflows will be
5803 detected by ASan. */
5804
5805 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
5806 return expand_call (exp, target, ignore);
5807
5808 /* When not optimizing, generate calls to library functions for a certain
5809 set of builtins. */
5810 if (!optimize
5811 && !called_as_built_in (fndecl)
5812 && fcode != BUILT_IN_FORK
5813 && fcode != BUILT_IN_EXECL
5814 && fcode != BUILT_IN_EXECV
5815 && fcode != BUILT_IN_EXECLP
5816 && fcode != BUILT_IN_EXECLE
5817 && fcode != BUILT_IN_EXECVP
5818 && fcode != BUILT_IN_EXECVE
5819 && fcode != BUILT_IN_ALLOCA
5820 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5821 && fcode != BUILT_IN_FREE
5822 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
5823 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
5824 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
5825 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
5826 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
5827 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
5828 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
5829 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
5830 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
5831 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
5832 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
5833 && fcode != BUILT_IN_CHKP_BNDRET)
5834 return expand_call (exp, target, ignore);
5835
5836 /* The built-in function expanders test for target == const0_rtx
5837 to determine whether the function's result will be ignored. */
5838 if (ignore)
5839 target = const0_rtx;
5840
5841 /* If the result of a pure or const built-in function is ignored, and
5842 none of its arguments are volatile, we can avoid expanding the
5843 built-in call and just evaluate the arguments for side-effects. */
5844 if (target == const0_rtx
5845 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5846 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5847 {
5848 bool volatilep = false;
5849 tree arg;
5850 call_expr_arg_iterator iter;
5851
5852 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5853 if (TREE_THIS_VOLATILE (arg))
5854 {
5855 volatilep = true;
5856 break;
5857 }
5858
5859 if (! volatilep)
5860 {
5861 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5862 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5863 return const0_rtx;
5864 }
5865 }
5866
5867 /* expand_builtin_with_bounds is supposed to be used for
5868 instrumented builtin calls. */
5869 gcc_assert (!CALL_WITH_BOUNDS_P (exp));
5870
5871 switch (fcode)
5872 {
5873 CASE_FLT_FN (BUILT_IN_FABS):
5874 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
5875 case BUILT_IN_FABSD32:
5876 case BUILT_IN_FABSD64:
5877 case BUILT_IN_FABSD128:
5878 target = expand_builtin_fabs (exp, target, subtarget);
5879 if (target)
5880 return target;
5881 break;
5882
5883 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5884 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
5885 target = expand_builtin_copysign (exp, target, subtarget);
5886 if (target)
5887 return target;
5888 break;
5889
5890 /* Just do a normal library call if we were unable to fold
5891 the values. */
5892 CASE_FLT_FN (BUILT_IN_CABS):
5893 break;
5894
5895 CASE_FLT_FN (BUILT_IN_FMA):
5896 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
5897 if (target)
5898 return target;
5899 break;
5900
5901 CASE_FLT_FN (BUILT_IN_ILOGB):
5902 if (! flag_unsafe_math_optimizations)
5903 break;
5904 CASE_FLT_FN (BUILT_IN_ISINF):
5905 CASE_FLT_FN (BUILT_IN_FINITE):
5906 case BUILT_IN_ISFINITE:
5907 case BUILT_IN_ISNORMAL:
5908 target = expand_builtin_interclass_mathfn (exp, target);
5909 if (target)
5910 return target;
5911 break;
5912
5913 CASE_FLT_FN (BUILT_IN_ICEIL):
5914 CASE_FLT_FN (BUILT_IN_LCEIL):
5915 CASE_FLT_FN (BUILT_IN_LLCEIL):
5916 CASE_FLT_FN (BUILT_IN_LFLOOR):
5917 CASE_FLT_FN (BUILT_IN_IFLOOR):
5918 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5919 target = expand_builtin_int_roundingfn (exp, target);
5920 if (target)
5921 return target;
5922 break;
5923
5924 CASE_FLT_FN (BUILT_IN_IRINT):
5925 CASE_FLT_FN (BUILT_IN_LRINT):
5926 CASE_FLT_FN (BUILT_IN_LLRINT):
5927 CASE_FLT_FN (BUILT_IN_IROUND):
5928 CASE_FLT_FN (BUILT_IN_LROUND):
5929 CASE_FLT_FN (BUILT_IN_LLROUND):
5930 target = expand_builtin_int_roundingfn_2 (exp, target);
5931 if (target)
5932 return target;
5933 break;
5934
5935 CASE_FLT_FN (BUILT_IN_POWI):
5936 target = expand_builtin_powi (exp, target);
5937 if (target)
5938 return target;
5939 break;
5940
5941 CASE_FLT_FN (BUILT_IN_CEXPI):
5942 target = expand_builtin_cexpi (exp, target);
5943 gcc_assert (target);
5944 return target;
5945
5946 CASE_FLT_FN (BUILT_IN_SIN):
5947 CASE_FLT_FN (BUILT_IN_COS):
5948 if (! flag_unsafe_math_optimizations)
5949 break;
5950 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5951 if (target)
5952 return target;
5953 break;
5954
5955 CASE_FLT_FN (BUILT_IN_SINCOS):
5956 if (! flag_unsafe_math_optimizations)
5957 break;
5958 target = expand_builtin_sincos (exp);
5959 if (target)
5960 return target;
5961 break;
5962
5963 case BUILT_IN_APPLY_ARGS:
5964 return expand_builtin_apply_args ();
5965
5966 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5967 FUNCTION with a copy of the parameters described by
5968 ARGUMENTS, and ARGSIZE. It returns a block of memory
5969 allocated on the stack into which is stored all the registers
5970 that might possibly be used for returning the result of a
5971 function. ARGUMENTS is the value returned by
5972 __builtin_apply_args. ARGSIZE is the number of bytes of
5973 arguments that must be copied. ??? How should this value be
5974 computed? We'll also need a safe worst case value for varargs
5975 functions. */
5976 case BUILT_IN_APPLY:
5977 if (!validate_arglist (exp, POINTER_TYPE,
5978 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5979 && !validate_arglist (exp, REFERENCE_TYPE,
5980 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5981 return const0_rtx;
5982 else
5983 {
5984 rtx ops[3];
5985
5986 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
5987 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
5988 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
5989
5990 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5991 }
5992
5993 /* __builtin_return (RESULT) causes the function to return the
5994 value described by RESULT. RESULT is address of the block of
5995 memory returned by __builtin_apply. */
5996 case BUILT_IN_RETURN:
5997 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5998 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
5999 return const0_rtx;
6000
6001 case BUILT_IN_SAVEREGS:
6002 return expand_builtin_saveregs ();
6003
6004 case BUILT_IN_VA_ARG_PACK:
6005 /* All valid uses of __builtin_va_arg_pack () are removed during
6006 inlining. */
6007 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6008 return const0_rtx;
6009
6010 case BUILT_IN_VA_ARG_PACK_LEN:
6011 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6012 inlining. */
6013 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6014 return const0_rtx;
6015
6016 /* Return the address of the first anonymous stack arg. */
6017 case BUILT_IN_NEXT_ARG:
6018 if (fold_builtin_next_arg (exp, false))
6019 return const0_rtx;
6020 return expand_builtin_next_arg ();
6021
6022 case BUILT_IN_CLEAR_CACHE:
6023 target = expand_builtin___clear_cache (exp);
6024 if (target)
6025 return target;
6026 break;
6027
6028 case BUILT_IN_CLASSIFY_TYPE:
6029 return expand_builtin_classify_type (exp);
6030
6031 case BUILT_IN_CONSTANT_P:
6032 return const0_rtx;
6033
6034 case BUILT_IN_FRAME_ADDRESS:
6035 case BUILT_IN_RETURN_ADDRESS:
6036 return expand_builtin_frame_address (fndecl, exp);
6037
6038 /* Returns the address of the area where the structure is returned.
6039 0 otherwise. */
6040 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6041 if (call_expr_nargs (exp) != 0
6042 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6043 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6044 return const0_rtx;
6045 else
6046 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6047
6048 case BUILT_IN_ALLOCA:
6049 case BUILT_IN_ALLOCA_WITH_ALIGN:
6050 /* If the allocation stems from the declaration of a variable-sized
6051 object, it cannot accumulate. */
6052 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6053 if (target)
6054 return target;
6055 break;
6056
6057 case BUILT_IN_STACK_SAVE:
6058 return expand_stack_save ();
6059
6060 case BUILT_IN_STACK_RESTORE:
6061 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6062 return const0_rtx;
6063
6064 case BUILT_IN_BSWAP16:
6065 case BUILT_IN_BSWAP32:
6066 case BUILT_IN_BSWAP64:
6067 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6068 if (target)
6069 return target;
6070 break;
6071
6072 CASE_INT_FN (BUILT_IN_FFS):
6073 target = expand_builtin_unop (target_mode, exp, target,
6074 subtarget, ffs_optab);
6075 if (target)
6076 return target;
6077 break;
6078
6079 CASE_INT_FN (BUILT_IN_CLZ):
6080 target = expand_builtin_unop (target_mode, exp, target,
6081 subtarget, clz_optab);
6082 if (target)
6083 return target;
6084 break;
6085
6086 CASE_INT_FN (BUILT_IN_CTZ):
6087 target = expand_builtin_unop (target_mode, exp, target,
6088 subtarget, ctz_optab);
6089 if (target)
6090 return target;
6091 break;
6092
6093 CASE_INT_FN (BUILT_IN_CLRSB):
6094 target = expand_builtin_unop (target_mode, exp, target,
6095 subtarget, clrsb_optab);
6096 if (target)
6097 return target;
6098 break;
6099
6100 CASE_INT_FN (BUILT_IN_POPCOUNT):
6101 target = expand_builtin_unop (target_mode, exp, target,
6102 subtarget, popcount_optab);
6103 if (target)
6104 return target;
6105 break;
6106
6107 CASE_INT_FN (BUILT_IN_PARITY):
6108 target = expand_builtin_unop (target_mode, exp, target,
6109 subtarget, parity_optab);
6110 if (target)
6111 return target;
6112 break;
6113
6114 case BUILT_IN_STRLEN:
6115 target = expand_builtin_strlen (exp, target, target_mode);
6116 if (target)
6117 return target;
6118 break;
6119
6120 case BUILT_IN_STRCPY:
6121 target = expand_builtin_strcpy (exp, target);
6122 if (target)
6123 return target;
6124 break;
6125
6126 case BUILT_IN_STRNCPY:
6127 target = expand_builtin_strncpy (exp, target);
6128 if (target)
6129 return target;
6130 break;
6131
6132 case BUILT_IN_STPCPY:
6133 target = expand_builtin_stpcpy (exp, target, mode);
6134 if (target)
6135 return target;
6136 break;
6137
6138 case BUILT_IN_MEMCPY:
6139 target = expand_builtin_memcpy (exp, target);
6140 if (target)
6141 return target;
6142 break;
6143
6144 case BUILT_IN_MEMPCPY:
6145 target = expand_builtin_mempcpy (exp, target, mode);
6146 if (target)
6147 return target;
6148 break;
6149
6150 case BUILT_IN_MEMSET:
6151 target = expand_builtin_memset (exp, target, mode);
6152 if (target)
6153 return target;
6154 break;
6155
6156 case BUILT_IN_BZERO:
6157 target = expand_builtin_bzero (exp);
6158 if (target)
6159 return target;
6160 break;
6161
6162 case BUILT_IN_STRCMP:
6163 target = expand_builtin_strcmp (exp, target);
6164 if (target)
6165 return target;
6166 break;
6167
6168 case BUILT_IN_STRNCMP:
6169 target = expand_builtin_strncmp (exp, target, mode);
6170 if (target)
6171 return target;
6172 break;
6173
6174 case BUILT_IN_BCMP:
6175 case BUILT_IN_MEMCMP:
6176 case BUILT_IN_MEMCMP_EQ:
6177 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
6178 if (target)
6179 return target;
6180 if (fcode == BUILT_IN_MEMCMP_EQ)
6181 {
6182 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
6183 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
6184 }
6185 break;
6186
6187 case BUILT_IN_SETJMP:
6188 /* This should have been lowered to the builtins below. */
6189 gcc_unreachable ();
6190
6191 case BUILT_IN_SETJMP_SETUP:
6192 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6193 and the receiver label. */
6194 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6195 {
6196 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6197 VOIDmode, EXPAND_NORMAL);
6198 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6199 rtx_insn *label_r = label_rtx (label);
6200
6201 /* This is copied from the handling of non-local gotos. */
6202 expand_builtin_setjmp_setup (buf_addr, label_r);
6203 nonlocal_goto_handler_labels
6204 = gen_rtx_INSN_LIST (VOIDmode, label_r,
6205 nonlocal_goto_handler_labels);
6206 /* ??? Do not let expand_label treat us as such since we would
6207 not want to be both on the list of non-local labels and on
6208 the list of forced labels. */
6209 FORCED_LABEL (label) = 0;
6210 return const0_rtx;
6211 }
6212 break;
6213
6214 case BUILT_IN_SETJMP_RECEIVER:
6215 /* __builtin_setjmp_receiver is passed the receiver label. */
6216 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6217 {
6218 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6219 rtx_insn *label_r = label_rtx (label);
6220
6221 expand_builtin_setjmp_receiver (label_r);
6222 return const0_rtx;
6223 }
6224 break;
6225
6226 /* __builtin_longjmp is passed a pointer to an array of five words.
6227 It's similar to the C library longjmp function but works with
6228 __builtin_setjmp above. */
6229 case BUILT_IN_LONGJMP:
6230 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6231 {
6232 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6233 VOIDmode, EXPAND_NORMAL);
6234 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6235
6236 if (value != const1_rtx)
6237 {
6238 error ("%<__builtin_longjmp%> second argument must be 1");
6239 return const0_rtx;
6240 }
6241
6242 expand_builtin_longjmp (buf_addr, value);
6243 return const0_rtx;
6244 }
6245 break;
6246
6247 case BUILT_IN_NONLOCAL_GOTO:
6248 target = expand_builtin_nonlocal_goto (exp);
6249 if (target)
6250 return target;
6251 break;
6252
6253 /* This updates the setjmp buffer that is its argument with the value
6254 of the current stack pointer. */
6255 case BUILT_IN_UPDATE_SETJMP_BUF:
6256 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6257 {
6258 rtx buf_addr
6259 = expand_normal (CALL_EXPR_ARG (exp, 0));
6260
6261 expand_builtin_update_setjmp_buf (buf_addr);
6262 return const0_rtx;
6263 }
6264 break;
6265
6266 case BUILT_IN_TRAP:
6267 expand_builtin_trap ();
6268 return const0_rtx;
6269
6270 case BUILT_IN_UNREACHABLE:
6271 expand_builtin_unreachable ();
6272 return const0_rtx;
6273
6274 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6275 case BUILT_IN_SIGNBITD32:
6276 case BUILT_IN_SIGNBITD64:
6277 case BUILT_IN_SIGNBITD128:
6278 target = expand_builtin_signbit (exp, target);
6279 if (target)
6280 return target;
6281 break;
6282
6283 /* Various hooks for the DWARF 2 __throw routine. */
6284 case BUILT_IN_UNWIND_INIT:
6285 expand_builtin_unwind_init ();
6286 return const0_rtx;
6287 case BUILT_IN_DWARF_CFA:
6288 return virtual_cfa_rtx;
6289 #ifdef DWARF2_UNWIND_INFO
6290 case BUILT_IN_DWARF_SP_COLUMN:
6291 return expand_builtin_dwarf_sp_column ();
6292 case BUILT_IN_INIT_DWARF_REG_SIZES:
6293 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6294 return const0_rtx;
6295 #endif
6296 case BUILT_IN_FROB_RETURN_ADDR:
6297 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6298 case BUILT_IN_EXTRACT_RETURN_ADDR:
6299 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6300 case BUILT_IN_EH_RETURN:
6301 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6302 CALL_EXPR_ARG (exp, 1));
6303 return const0_rtx;
6304 case BUILT_IN_EH_RETURN_DATA_REGNO:
6305 return expand_builtin_eh_return_data_regno (exp);
6306 case BUILT_IN_EXTEND_POINTER:
6307 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6308 case BUILT_IN_EH_POINTER:
6309 return expand_builtin_eh_pointer (exp);
6310 case BUILT_IN_EH_FILTER:
6311 return expand_builtin_eh_filter (exp);
6312 case BUILT_IN_EH_COPY_VALUES:
6313 return expand_builtin_eh_copy_values (exp);
6314
6315 case BUILT_IN_VA_START:
6316 return expand_builtin_va_start (exp);
6317 case BUILT_IN_VA_END:
6318 return expand_builtin_va_end (exp);
6319 case BUILT_IN_VA_COPY:
6320 return expand_builtin_va_copy (exp);
6321 case BUILT_IN_EXPECT:
6322 return expand_builtin_expect (exp, target);
6323 case BUILT_IN_ASSUME_ALIGNED:
6324 return expand_builtin_assume_aligned (exp, target);
6325 case BUILT_IN_PREFETCH:
6326 expand_builtin_prefetch (exp);
6327 return const0_rtx;
6328
6329 case BUILT_IN_INIT_TRAMPOLINE:
6330 return expand_builtin_init_trampoline (exp, true);
6331 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6332 return expand_builtin_init_trampoline (exp, false);
6333 case BUILT_IN_ADJUST_TRAMPOLINE:
6334 return expand_builtin_adjust_trampoline (exp);
6335
6336 case BUILT_IN_FORK:
6337 case BUILT_IN_EXECL:
6338 case BUILT_IN_EXECV:
6339 case BUILT_IN_EXECLP:
6340 case BUILT_IN_EXECLE:
6341 case BUILT_IN_EXECVP:
6342 case BUILT_IN_EXECVE:
6343 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6344 if (target)
6345 return target;
6346 break;
6347
6348 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6349 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6350 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6351 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6352 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6353 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6354 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6355 if (target)
6356 return target;
6357 break;
6358
6359 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6360 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6361 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6362 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6363 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6364 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6365 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6366 if (target)
6367 return target;
6368 break;
6369
6370 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6371 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6372 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6373 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6374 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6375 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6376 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6377 if (target)
6378 return target;
6379 break;
6380
6381 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6382 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6383 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6384 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6385 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6386 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6387 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6388 if (target)
6389 return target;
6390 break;
6391
6392 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6393 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6394 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6395 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6396 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6397 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6398 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6399 if (target)
6400 return target;
6401 break;
6402
6403 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6404 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6405 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6406 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6407 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6408 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6409 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6410 if (target)
6411 return target;
6412 break;
6413
6414 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6415 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6416 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6417 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6418 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6419 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6420 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6421 if (target)
6422 return target;
6423 break;
6424
6425 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6426 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6427 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6428 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6429 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6430 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6431 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6432 if (target)
6433 return target;
6434 break;
6435
6436 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6437 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6438 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6439 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6440 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6441 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6442 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6443 if (target)
6444 return target;
6445 break;
6446
6447 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6448 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6449 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6450 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6451 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6452 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6453 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6454 if (target)
6455 return target;
6456 break;
6457
6458 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6459 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6460 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6461 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6462 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6463 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6464 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6465 if (target)
6466 return target;
6467 break;
6468
6469 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6470 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6471 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6472 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6473 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6474 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6475 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6476 if (target)
6477 return target;
6478 break;
6479
6480 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6481 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6482 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6483 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6484 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6485 if (mode == VOIDmode)
6486 mode = TYPE_MODE (boolean_type_node);
6487 if (!target || !register_operand (target, mode))
6488 target = gen_reg_rtx (mode);
6489
6490 mode = get_builtin_sync_mode
6491 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6492 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6493 if (target)
6494 return target;
6495 break;
6496
6497 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6498 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6499 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6500 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6501 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6502 mode = get_builtin_sync_mode
6503 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6504 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6505 if (target)
6506 return target;
6507 break;
6508
6509 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6510 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6511 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6512 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6513 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6514 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6515 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6516 if (target)
6517 return target;
6518 break;
6519
6520 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6521 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6522 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6523 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6524 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6525 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6526 expand_builtin_sync_lock_release (mode, exp);
6527 return const0_rtx;
6528
6529 case BUILT_IN_SYNC_SYNCHRONIZE:
6530 expand_builtin_sync_synchronize ();
6531 return const0_rtx;
6532
6533 case BUILT_IN_ATOMIC_EXCHANGE_1:
6534 case BUILT_IN_ATOMIC_EXCHANGE_2:
6535 case BUILT_IN_ATOMIC_EXCHANGE_4:
6536 case BUILT_IN_ATOMIC_EXCHANGE_8:
6537 case BUILT_IN_ATOMIC_EXCHANGE_16:
6538 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6539 target = expand_builtin_atomic_exchange (mode, exp, target);
6540 if (target)
6541 return target;
6542 break;
6543
6544 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6545 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6546 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6547 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6548 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6549 {
6550 unsigned int nargs, z;
6551 vec<tree, va_gc> *vec;
6552
6553 mode =
6554 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6555 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6556 if (target)
6557 return target;
6558
6559 /* If this is turned into an external library call, the weak parameter
6560 must be dropped to match the expected parameter list. */
6561 nargs = call_expr_nargs (exp);
6562 vec_alloc (vec, nargs - 1);
6563 for (z = 0; z < 3; z++)
6564 vec->quick_push (CALL_EXPR_ARG (exp, z));
6565 /* Skip the boolean weak parameter. */
6566 for (z = 4; z < 6; z++)
6567 vec->quick_push (CALL_EXPR_ARG (exp, z));
6568 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6569 break;
6570 }
6571
6572 case BUILT_IN_ATOMIC_LOAD_1:
6573 case BUILT_IN_ATOMIC_LOAD_2:
6574 case BUILT_IN_ATOMIC_LOAD_4:
6575 case BUILT_IN_ATOMIC_LOAD_8:
6576 case BUILT_IN_ATOMIC_LOAD_16:
6577 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6578 target = expand_builtin_atomic_load (mode, exp, target);
6579 if (target)
6580 return target;
6581 break;
6582
6583 case BUILT_IN_ATOMIC_STORE_1:
6584 case BUILT_IN_ATOMIC_STORE_2:
6585 case BUILT_IN_ATOMIC_STORE_4:
6586 case BUILT_IN_ATOMIC_STORE_8:
6587 case BUILT_IN_ATOMIC_STORE_16:
6588 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6589 target = expand_builtin_atomic_store (mode, exp);
6590 if (target)
6591 return const0_rtx;
6592 break;
6593
6594 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6595 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6596 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6597 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6598 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6599 {
6600 enum built_in_function lib;
6601 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6602 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6603 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6604 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6605 ignore, lib);
6606 if (target)
6607 return target;
6608 break;
6609 }
6610 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6611 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6612 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6613 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6614 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6615 {
6616 enum built_in_function lib;
6617 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6618 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6619 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6620 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6621 ignore, lib);
6622 if (target)
6623 return target;
6624 break;
6625 }
6626 case BUILT_IN_ATOMIC_AND_FETCH_1:
6627 case BUILT_IN_ATOMIC_AND_FETCH_2:
6628 case BUILT_IN_ATOMIC_AND_FETCH_4:
6629 case BUILT_IN_ATOMIC_AND_FETCH_8:
6630 case BUILT_IN_ATOMIC_AND_FETCH_16:
6631 {
6632 enum built_in_function lib;
6633 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6634 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6635 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6636 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6637 ignore, lib);
6638 if (target)
6639 return target;
6640 break;
6641 }
6642 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6643 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6644 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6645 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6646 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6647 {
6648 enum built_in_function lib;
6649 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6650 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6651 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6652 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6653 ignore, lib);
6654 if (target)
6655 return target;
6656 break;
6657 }
6658 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6659 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6660 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6661 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6662 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6663 {
6664 enum built_in_function lib;
6665 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6666 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6667 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6668 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6669 ignore, lib);
6670 if (target)
6671 return target;
6672 break;
6673 }
6674 case BUILT_IN_ATOMIC_OR_FETCH_1:
6675 case BUILT_IN_ATOMIC_OR_FETCH_2:
6676 case BUILT_IN_ATOMIC_OR_FETCH_4:
6677 case BUILT_IN_ATOMIC_OR_FETCH_8:
6678 case BUILT_IN_ATOMIC_OR_FETCH_16:
6679 {
6680 enum built_in_function lib;
6681 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6682 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6683 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6684 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6685 ignore, lib);
6686 if (target)
6687 return target;
6688 break;
6689 }
6690 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6691 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6692 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6693 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6694 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6695 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6696 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6697 ignore, BUILT_IN_NONE);
6698 if (target)
6699 return target;
6700 break;
6701
6702 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6703 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6704 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6705 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6706 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6707 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6708 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6709 ignore, BUILT_IN_NONE);
6710 if (target)
6711 return target;
6712 break;
6713
6714 case BUILT_IN_ATOMIC_FETCH_AND_1:
6715 case BUILT_IN_ATOMIC_FETCH_AND_2:
6716 case BUILT_IN_ATOMIC_FETCH_AND_4:
6717 case BUILT_IN_ATOMIC_FETCH_AND_8:
6718 case BUILT_IN_ATOMIC_FETCH_AND_16:
6719 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6720 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6721 ignore, BUILT_IN_NONE);
6722 if (target)
6723 return target;
6724 break;
6725
6726 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6727 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6728 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6729 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6730 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6731 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6732 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6733 ignore, BUILT_IN_NONE);
6734 if (target)
6735 return target;
6736 break;
6737
6738 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6739 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6740 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6741 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6742 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6743 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6744 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6745 ignore, BUILT_IN_NONE);
6746 if (target)
6747 return target;
6748 break;
6749
6750 case BUILT_IN_ATOMIC_FETCH_OR_1:
6751 case BUILT_IN_ATOMIC_FETCH_OR_2:
6752 case BUILT_IN_ATOMIC_FETCH_OR_4:
6753 case BUILT_IN_ATOMIC_FETCH_OR_8:
6754 case BUILT_IN_ATOMIC_FETCH_OR_16:
6755 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6756 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6757 ignore, BUILT_IN_NONE);
6758 if (target)
6759 return target;
6760 break;
6761
6762 case BUILT_IN_ATOMIC_TEST_AND_SET:
6763 return expand_builtin_atomic_test_and_set (exp, target);
6764
6765 case BUILT_IN_ATOMIC_CLEAR:
6766 return expand_builtin_atomic_clear (exp);
6767
6768 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6769 return expand_builtin_atomic_always_lock_free (exp);
6770
6771 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6772 target = expand_builtin_atomic_is_lock_free (exp);
6773 if (target)
6774 return target;
6775 break;
6776
6777 case BUILT_IN_ATOMIC_THREAD_FENCE:
6778 expand_builtin_atomic_thread_fence (exp);
6779 return const0_rtx;
6780
6781 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6782 expand_builtin_atomic_signal_fence (exp);
6783 return const0_rtx;
6784
6785 case BUILT_IN_OBJECT_SIZE:
6786 return expand_builtin_object_size (exp);
6787
6788 case BUILT_IN_MEMCPY_CHK:
6789 case BUILT_IN_MEMPCPY_CHK:
6790 case BUILT_IN_MEMMOVE_CHK:
6791 case BUILT_IN_MEMSET_CHK:
6792 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6793 if (target)
6794 return target;
6795 break;
6796
6797 case BUILT_IN_STRCPY_CHK:
6798 case BUILT_IN_STPCPY_CHK:
6799 case BUILT_IN_STRNCPY_CHK:
6800 case BUILT_IN_STPNCPY_CHK:
6801 case BUILT_IN_STRCAT_CHK:
6802 case BUILT_IN_STRNCAT_CHK:
6803 case BUILT_IN_SNPRINTF_CHK:
6804 case BUILT_IN_VSNPRINTF_CHK:
6805 maybe_emit_chk_warning (exp, fcode);
6806 break;
6807
6808 case BUILT_IN_SPRINTF_CHK:
6809 case BUILT_IN_VSPRINTF_CHK:
6810 maybe_emit_sprintf_chk_warning (exp, fcode);
6811 break;
6812
6813 case BUILT_IN_FREE:
6814 if (warn_free_nonheap_object)
6815 maybe_emit_free_warning (exp);
6816 break;
6817
6818 case BUILT_IN_THREAD_POINTER:
6819 return expand_builtin_thread_pointer (exp, target);
6820
6821 case BUILT_IN_SET_THREAD_POINTER:
6822 expand_builtin_set_thread_pointer (exp);
6823 return const0_rtx;
6824
6825 case BUILT_IN_CILK_DETACH:
6826 expand_builtin_cilk_detach (exp);
6827 return const0_rtx;
6828
6829 case BUILT_IN_CILK_POP_FRAME:
6830 expand_builtin_cilk_pop_frame (exp);
6831 return const0_rtx;
6832
6833 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
6834 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
6835 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
6836 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
6837 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
6838 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
6839 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
6840 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
6841 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
6842 case BUILT_IN_CHKP_GET_PTR_LBOUND:
6843 case BUILT_IN_CHKP_GET_PTR_UBOUND:
6844 /* We allow calls to user CHKP builtins even when Pointer
6845 Bounds Checker is off. */
6846 if (!chkp_function_instrumented_p (current_function_decl))
6847 {
6848 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
6849 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
6850 || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
6851 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
6852 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
6853 return expand_normal (CALL_EXPR_ARG (exp, 0));
6854 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
6855 return expand_normal (size_zero_node);
6856 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
6857 return expand_normal (size_int (-1));
6858 else
6859 return const0_rtx;
6860 }
6861 /* FALLTHROUGH */
6862
6863 case BUILT_IN_CHKP_BNDMK:
6864 case BUILT_IN_CHKP_BNDSTX:
6865 case BUILT_IN_CHKP_BNDCL:
6866 case BUILT_IN_CHKP_BNDCU:
6867 case BUILT_IN_CHKP_BNDLDX:
6868 case BUILT_IN_CHKP_BNDRET:
6869 case BUILT_IN_CHKP_INTERSECT:
6870 case BUILT_IN_CHKP_NARROW:
6871 case BUILT_IN_CHKP_EXTRACT_LOWER:
6872 case BUILT_IN_CHKP_EXTRACT_UPPER:
6873 /* A software implementation of Pointer Bounds Checker is not yet
6874 implemented; target support is required. */
6875 error ("Your target platform does not support -fcheck-pointer-bounds");
6876 break;
6877
6878 case BUILT_IN_ACC_ON_DEVICE:
6879 /* Do library call, if we failed to expand the builtin when
6880 folding. */
6881 break;
6882
6883 default: /* Just do a library call for an unknown builtin. */
6884 break;
6885 }
6886
6887 /* The switch statement above can drop through to cause the function
6888 to be called normally. */
6889 return expand_call (exp, target, ignore);
6890 }
6891
6892 /* Similar to expand_builtin but is used for instrumented calls. */
6893
6894 rtx
6895 expand_builtin_with_bounds (tree exp, rtx target,
6896 rtx subtarget ATTRIBUTE_UNUSED,
6897 machine_mode mode, int ignore)
6898 {
6899 tree fndecl = get_callee_fndecl (exp);
6900 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6901
6902 gcc_assert (CALL_WITH_BOUNDS_P (exp));
6903
6904 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6905 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6906
6907 gcc_assert (fcode > BEGIN_CHKP_BUILTINS
6908 && fcode < END_CHKP_BUILTINS);
6909
6910 switch (fcode)
6911 {
6912 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
6913 target = expand_builtin_memcpy_with_bounds (exp, target);
6914 if (target)
6915 return target;
6916 break;
6917
6918 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
6919 target = expand_builtin_mempcpy_with_bounds (exp, target, mode);
6920 if (target)
6921 return target;
6922 break;
6923
6924 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
6925 target = expand_builtin_memset_with_bounds (exp, target, mode);
6926 if (target)
6927 return target;
6928 break;
6929
6930 default:
6931 break;
6932 }
6933
6934 /* The switch statement above can drop through to cause the function
6935 to be called normally. */
6936 return expand_call (exp, target, ignore);
6937 }
6938
6939 /* Determine whether a tree node represents a call to a built-in
6940 function. If the tree T is a call to a built-in function with
6941 the right number of arguments of the appropriate types, return
6942 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6943 Otherwise the return value is END_BUILTINS. */
6944
6945 enum built_in_function
6946 builtin_mathfn_code (const_tree t)
6947 {
6948 const_tree fndecl, arg, parmlist;
6949 const_tree argtype, parmtype;
6950 const_call_expr_arg_iterator iter;
6951
6952 if (TREE_CODE (t) != CALL_EXPR
6953 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6954 return END_BUILTINS;
6955
6956 fndecl = get_callee_fndecl (t);
6957 if (fndecl == NULL_TREE
6958 || TREE_CODE (fndecl) != FUNCTION_DECL
6959 || ! DECL_BUILT_IN (fndecl)
6960 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6961 return END_BUILTINS;
6962
6963 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6964 init_const_call_expr_arg_iterator (t, &iter);
6965 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6966 {
6967 /* If a function doesn't take a variable number of arguments,
6968 the last element in the list will have type `void'. */
6969 parmtype = TREE_VALUE (parmlist);
6970 if (VOID_TYPE_P (parmtype))
6971 {
6972 if (more_const_call_expr_args_p (&iter))
6973 return END_BUILTINS;
6974 return DECL_FUNCTION_CODE (fndecl);
6975 }
6976
6977 if (! more_const_call_expr_args_p (&iter))
6978 return END_BUILTINS;
6979
6980 arg = next_const_call_expr_arg (&iter);
6981 argtype = TREE_TYPE (arg);
6982
6983 if (SCALAR_FLOAT_TYPE_P (parmtype))
6984 {
6985 if (! SCALAR_FLOAT_TYPE_P (argtype))
6986 return END_BUILTINS;
6987 }
6988 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6989 {
6990 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6991 return END_BUILTINS;
6992 }
6993 else if (POINTER_TYPE_P (parmtype))
6994 {
6995 if (! POINTER_TYPE_P (argtype))
6996 return END_BUILTINS;
6997 }
6998 else if (INTEGRAL_TYPE_P (parmtype))
6999 {
7000 if (! INTEGRAL_TYPE_P (argtype))
7001 return END_BUILTINS;
7002 }
7003 else
7004 return END_BUILTINS;
7005 }
7006
7007 /* Variable-length argument list. */
7008 return DECL_FUNCTION_CODE (fndecl);
7009 }
7010
7011 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7012 evaluate to a constant. */
7013
7014 static tree
7015 fold_builtin_constant_p (tree arg)
7016 {
7017 /* We return 1 for a numeric type that's known to be a constant
7018 value at compile-time or for an aggregate type that's a
7019 literal constant. */
7020 STRIP_NOPS (arg);
7021
7022 /* If we know this is a constant, return the constant one. */
7023 if (CONSTANT_CLASS_P (arg)
7024 || (TREE_CODE (arg) == CONSTRUCTOR
7025 && TREE_CONSTANT (arg)))
7026 return integer_one_node;
7027 if (TREE_CODE (arg) == ADDR_EXPR)
7028 {
7029 tree op = TREE_OPERAND (arg, 0);
7030 if (TREE_CODE (op) == STRING_CST
7031 || (TREE_CODE (op) == ARRAY_REF
7032 && integer_zerop (TREE_OPERAND (op, 1))
7033 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7034 return integer_one_node;
7035 }
7036
7037 /* If this expression has side effects, show we don't know it to be a
7038 constant. Likewise if it's a pointer or aggregate type, since in
7039 those cases we only want literals; those are only optimized
7040 when generating RTL, not later.
7041 And finally, if we are compiling an initializer, not code, we
7042 need to return a definite result now; there's not going to be any
7043 more optimization done. */
7044 if (TREE_SIDE_EFFECTS (arg)
7045 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7046 || POINTER_TYPE_P (TREE_TYPE (arg))
7047 || cfun == 0
7048 || folding_initializer
7049 || force_folding_builtin_constant_p)
7050 return integer_zero_node;
7051
7052 return NULL_TREE;
7053 }
7054
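/* Illustrative sketch, not part of builtins.c: the user-visible behaviour
   implemented by fold_builtin_constant_p above.  The example_* helper name
   is made up for illustration; only standard headers are assumed.  */
#include <assert.h>

static int
example_builtin_constant_p (int runtime_value)
{
  /* Integer and floating literals are CONSTANT_CLASS_P nodes, so the
     fold above returns integer_one_node for them.  */
  assert (__builtin_constant_p (42));
  assert (__builtin_constant_p (3.14));

  /* A function parameter has no compile-time value; without optimization
     the call folds to 0 right away (see the !optimize handling in
     fold_builtin_1), with optimization it may still become 1 if the
     optimizers later prove the value constant.  */
  return __builtin_constant_p (runtime_value);
}
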
7055 /* Create a call to builtin_expect with PRED and EXPECTED (and, if
7056 non-null, PREDICTOR) as its arguments and return it as a truthvalue. */
7057
7058 static tree
7059 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7060 tree predictor)
7061 {
7062 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7063
7064 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7065 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7066 ret_type = TREE_TYPE (TREE_TYPE (fn));
7067 pred_type = TREE_VALUE (arg_types);
7068 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7069
7070 pred = fold_convert_loc (loc, pred_type, pred);
7071 expected = fold_convert_loc (loc, expected_type, expected);
7072 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7073 predictor);
7074
7075 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7076 build_int_cst (ret_type, 0));
7077 }
7078
7079 /* Fold a call to builtin_expect with arguments ARG0, ARG1 and ARG2.
7080 Return NULL_TREE if no simplification is possible. */
7081
7082 tree
7083 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
7084 {
7085 tree inner, fndecl, inner_arg0;
7086 enum tree_code code;
7087
7088 /* Distribute the expected value over short-circuiting operators.
7089 See through the cast from truthvalue_type_node to long. */
7090 inner_arg0 = arg0;
7091 while (CONVERT_EXPR_P (inner_arg0)
7092 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7093 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7094 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7095
7096 /* If this is a builtin_expect within a builtin_expect, keep the
7097 inner one. See through a comparison against a constant. It
7098 might have been added to create a truthvalue. */
7099 inner = inner_arg0;
7100
7101 if (COMPARISON_CLASS_P (inner)
7102 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7103 inner = TREE_OPERAND (inner, 0);
7104
7105 if (TREE_CODE (inner) == CALL_EXPR
7106 && (fndecl = get_callee_fndecl (inner))
7107 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7108 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7109 return arg0;
7110
7111 inner = inner_arg0;
7112 code = TREE_CODE (inner);
7113 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7114 {
7115 tree op0 = TREE_OPERAND (inner, 0);
7116 tree op1 = TREE_OPERAND (inner, 1);
7117
7118 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7119 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
7120 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7121
7122 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7123 }
7124
7125 /* If the argument isn't invariant then there's nothing else we can do. */
7126 if (!TREE_CONSTANT (inner_arg0))
7127 return NULL_TREE;
7128
7129 /* If we expect that a comparison against the argument will fold to
7130 a constant, return the constant. In practice, this means a true
7131 constant or the address of a non-weak symbol. */
7132 inner = inner_arg0;
7133 STRIP_NOPS (inner);
7134 if (TREE_CODE (inner) == ADDR_EXPR)
7135 {
7136 do
7137 {
7138 inner = TREE_OPERAND (inner, 0);
7139 }
7140 while (TREE_CODE (inner) == COMPONENT_REF
7141 || TREE_CODE (inner) == ARRAY_REF);
7142 if ((TREE_CODE (inner) == VAR_DECL
7143 || TREE_CODE (inner) == FUNCTION_DECL)
7144 && DECL_WEAK (inner))
7145 return NULL_TREE;
7146 }
7147
7148 /* Otherwise, ARG0 already has the proper type for the return value. */
7149 return arg0;
7150 }
7151
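/* Illustrative sketch, not part of builtins.c: the distribution over
   short-circuit operators performed above, seen at the source level.  The
   declarations of f and g and the example_* name are placeholders.  */
extern int f (void), g (void);

static int
example_expect_distribution (void)
{
  int a = f (), b = g ();

  /* Written by the user ...  */
  long hinted = __builtin_expect (a > 0 && b > 0, 1);

  /* ... and folded as if each operand had been hinted individually,
     which is what build_builtin_expect_predicate constructs.  */
  long folded = __builtin_expect (a > 0, 1) && __builtin_expect (b > 0, 1);

  return hinted == folded;   /* the two forms compute the same value */
}
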
7152 /* Fold a call to __builtin_classify_type with argument ARG. */
7153
7154 static tree
7155 fold_builtin_classify_type (tree arg)
7156 {
7157 if (arg == 0)
7158 return build_int_cst (integer_type_node, no_type_class);
7159
7160 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7161 }
7162
7163 /* Fold a call to __builtin_strlen with argument ARG. */
7164
7165 static tree
7166 fold_builtin_strlen (location_t loc, tree type, tree arg)
7167 {
7168 if (!validate_arg (arg, POINTER_TYPE))
7169 return NULL_TREE;
7170 else
7171 {
7172 tree len = c_strlen (arg, 0);
7173
7174 if (len)
7175 return fold_convert_loc (loc, type, len);
7176
7177 return NULL_TREE;
7178 }
7179 }
7180
7181 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7182
7183 static tree
7184 fold_builtin_inf (location_t loc, tree type, int warn)
7185 {
7186 REAL_VALUE_TYPE real;
7187
7188 /* __builtin_inff is intended to be usable to define INFINITY on all
7189 targets. If an infinity is not available, INFINITY expands "to a
7190 positive constant of type float that overflows at translation
7191 time", footnote "In this case, using INFINITY will violate the
7192 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7193 Thus we pedwarn to ensure this constraint violation is
7194 diagnosed. */
7195 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7196 pedwarn (loc, 0, "target format does not support infinity");
7197
7198 real_inf (&real);
7199 return build_real (type, real);
7200 }
7201
7202 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7203 NULL_TREE if no simplification can be made. */
7204
7205 static tree
7206 fold_builtin_sincos (location_t loc,
7207 tree arg0, tree arg1, tree arg2)
7208 {
7209 tree type;
7210 tree fndecl, call = NULL_TREE;
7211
7212 if (!validate_arg (arg0, REAL_TYPE)
7213 || !validate_arg (arg1, POINTER_TYPE)
7214 || !validate_arg (arg2, POINTER_TYPE))
7215 return NULL_TREE;
7216
7217 type = TREE_TYPE (arg0);
7218
7219 /* Calculate the result when the argument is a constant. */
7220 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
7221 if (fn == END_BUILTINS)
7222 return NULL_TREE;
7223
7224 /* Canonicalize sincos to cexpi. */
7225 if (TREE_CODE (arg0) == REAL_CST)
7226 {
7227 tree complex_type = build_complex_type (type);
7228 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
7229 }
7230 if (!call)
7231 {
7232 if (!targetm.libc_has_function (function_c99_math_complex)
7233 || !builtin_decl_implicit_p (fn))
7234 return NULL_TREE;
7235 fndecl = builtin_decl_explicit (fn);
7236 call = build_call_expr_loc (loc, fndecl, 1, arg0);
7237 call = builtin_save_expr (call);
7238 }
7239
7240 return build2 (COMPOUND_EXPR, void_type_node,
7241 build2 (MODIFY_EXPR, void_type_node,
7242 build_fold_indirect_ref_loc (loc, arg1),
7243 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
7244 build2 (MODIFY_EXPR, void_type_node,
7245 build_fold_indirect_ref_loc (loc, arg2),
7246 fold_build1_loc (loc, REALPART_EXPR, type, call)));
7247 }
7248
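/* Illustrative sketch, not part of builtins.c: what the canonicalization
   to cexpi means at the source level.  cexpi is internal to GCC, so the
   standard cexp (I * x) stands in for it here; the example_* helper name
   is a placeholder.  */
#include <complex.h>

static void
example_sincos_as_cexpi (double x, double *sinp, double *cosp)
{
  double complex e = cexp (I * x);  /* plays the role of cexpi (x) */
  *sinp = cimag (e);                /* IMAGPART_EXPR stored through arg1 */
  *cosp = creal (e);                /* REALPART_EXPR stored through arg2 */
}
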
7249 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
7250 arguments to the call, and TYPE is its return type.
7251 Return NULL_TREE if no simplification can be made. */
7252
7253 static tree
7254 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
7255 {
7256 if (!validate_arg (arg1, POINTER_TYPE)
7257 || !validate_arg (arg2, INTEGER_TYPE)
7258 || !validate_arg (len, INTEGER_TYPE))
7259 return NULL_TREE;
7260 else
7261 {
7262 const char *p1;
7263
7264 if (TREE_CODE (arg2) != INTEGER_CST
7265 || !tree_fits_uhwi_p (len))
7266 return NULL_TREE;
7267
7268 p1 = c_getstr (arg1);
7269 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
7270 {
7271 char c;
7272 const char *r;
7273 tree tem;
7274
7275 if (target_char_cast (arg2, &c))
7276 return NULL_TREE;
7277
7278 r = (const char *) memchr (p1, c, tree_to_uhwi (len));
7279
7280 if (r == NULL)
7281 return build_int_cst (TREE_TYPE (arg1), 0);
7282
7283 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
7284 return fold_convert_loc (loc, type, tem);
7285 }
7286 return NULL_TREE;
7287 }
7288 }
7289
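/* Illustrative sketch, not part of builtins.c: the constant case the fold
   above handles, expressed as runtime checks; the example_* helper name is
   a placeholder and only standard headers are assumed.  */
#include <assert.h>
#include <string.h>

static void
example_memchr_fold (void)
{
  const char *s = "hello";

  /* Found: the result is the base pointer plus the offset of the match,
     which is what fold_build_pointer_plus_hwi_loc constructs.  */
  assert (memchr (s, 'l', 6) == s + 2);

  /* Not found within LEN: the result is a null pointer constant.  */
  assert (memchr (s, 'z', 6) == NULL);
}
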
7290 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
7291 Return NULL_TREE if no simplification can be made. */
7292
7293 static tree
7294 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
7295 {
7296 if (!validate_arg (arg1, POINTER_TYPE)
7297 || !validate_arg (arg2, POINTER_TYPE)
7298 || !validate_arg (len, INTEGER_TYPE))
7299 return NULL_TREE;
7300
7301 /* If the LEN parameter is zero, return zero. */
7302 if (integer_zerop (len))
7303 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
7304 arg1, arg2);
7305
7306 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7307 if (operand_equal_p (arg1, arg2, 0))
7308 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
7309
7310 /* If the LEN parameter is one, return an expression corresponding to
7311 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
7312 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
7313 {
7314 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7315 tree cst_uchar_ptr_node
7316 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7317
7318 tree ind1
7319 = fold_convert_loc (loc, integer_type_node,
7320 build1 (INDIRECT_REF, cst_uchar_node,
7321 fold_convert_loc (loc,
7322 cst_uchar_ptr_node,
7323 arg1)));
7324 tree ind2
7325 = fold_convert_loc (loc, integer_type_node,
7326 build1 (INDIRECT_REF, cst_uchar_node,
7327 fold_convert_loc (loc,
7328 cst_uchar_ptr_node,
7329 arg2)));
7330 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
7331 }
7332
7333 return NULL_TREE;
7334 }
7335
7336 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
7337 Return NULL_TREE if no simplification can be made. */
7338
7339 static tree
7340 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
7341 {
7342 if (!validate_arg (arg1, POINTER_TYPE)
7343 || !validate_arg (arg2, POINTER_TYPE))
7344 return NULL_TREE;
7345
7346 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7347 if (operand_equal_p (arg1, arg2, 0))
7348 return integer_zero_node;
7349
7350 /* If the second arg is "", return *(const unsigned char*)arg1. */
7351 const char *p2 = c_getstr (arg2);
7352 if (p2 && *p2 == '\0')
7353 {
7354 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7355 tree cst_uchar_ptr_node
7356 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7357
7358 return fold_convert_loc (loc, integer_type_node,
7359 build1 (INDIRECT_REF, cst_uchar_node,
7360 fold_convert_loc (loc,
7361 cst_uchar_ptr_node,
7362 arg1)));
7363 }
7364
7365 /* If the first arg is "", return -*(const unsigned char*)arg2. */
7366 const char *p1 = c_getstr (arg1);
7367 if (p1 && *p1 == '\0')
7368 {
7369 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7370 tree cst_uchar_ptr_node
7371 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7372
7373 tree temp
7374 = fold_convert_loc (loc, integer_type_node,
7375 build1 (INDIRECT_REF, cst_uchar_node,
7376 fold_convert_loc (loc,
7377 cst_uchar_ptr_node,
7378 arg2)));
7379 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
7380 }
7381
7382 return NULL_TREE;
7383 }
7384
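/* Illustrative sketch, not part of builtins.c: the identities behind the
   strcmp folds above.  The empty-string cases reduce to a single unsigned
   char load, which is a valid strcmp result (only the sign is specified);
   the example_* helper name is a placeholder.  */
#include <assert.h>
#include <string.h>

static void
example_strcmp_folds (const char *s)
{
  /* Identical operands (no volatile access): the result is 0.  */
  assert (strcmp (s, s) == 0);

  /* Second argument "": folds to *(const unsigned char *) s, which has
     the same sign as the library result.  */
  assert ((strcmp (s, "") > 0) == (*(const unsigned char *) s > 0));

  /* First argument "": likewise, but negated.  */
  assert ((strcmp ("", s) < 0) == (*(const unsigned char *) s > 0));
}
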
7385 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
7386 Return NULL_TREE if no simplification can be made. */
7387
7388 static tree
7389 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
7390 {
7391 if (!validate_arg (arg1, POINTER_TYPE)
7392 || !validate_arg (arg2, POINTER_TYPE)
7393 || !validate_arg (len, INTEGER_TYPE))
7394 return NULL_TREE;
7395
7396 /* If the LEN parameter is zero, return zero. */
7397 if (integer_zerop (len))
7398 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
7399 arg1, arg2);
7400
7401 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7402 if (operand_equal_p (arg1, arg2, 0))
7403 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
7404
7405 /* If the second arg is "", and the length is greater than zero,
7406 return *(const unsigned char*)arg1. */
7407 const char *p2 = c_getstr (arg2);
7408 if (p2 && *p2 == '\0'
7409 && TREE_CODE (len) == INTEGER_CST
7410 && tree_int_cst_sgn (len) == 1)
7411 {
7412 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7413 tree cst_uchar_ptr_node
7414 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7415
7416 return fold_convert_loc (loc, integer_type_node,
7417 build1 (INDIRECT_REF, cst_uchar_node,
7418 fold_convert_loc (loc,
7419 cst_uchar_ptr_node,
7420 arg1)));
7421 }
7422
7423 /* If the first arg is "", and the length is greater than zero,
7424 return -*(const unsigned char*)arg2. */
7425 const char *p1 = c_getstr (arg1);
7426 if (p1 && *p1 == '\0'
7427 && TREE_CODE (len) == INTEGER_CST
7428 && tree_int_cst_sgn (len) == 1)
7429 {
7430 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7431 tree cst_uchar_ptr_node
7432 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7433
7434 tree temp = fold_convert_loc (loc, integer_type_node,
7435 build1 (INDIRECT_REF, cst_uchar_node,
7436 fold_convert_loc (loc,
7437 cst_uchar_ptr_node,
7438 arg2)));
7439 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
7440 }
7441
7442 /* If the LEN parameter is one, return an expression corresponding to
7443 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
7444 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
7445 {
7446 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7447 tree cst_uchar_ptr_node
7448 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7449
7450 tree ind1 = fold_convert_loc (loc, integer_type_node,
7451 build1 (INDIRECT_REF, cst_uchar_node,
7452 fold_convert_loc (loc,
7453 cst_uchar_ptr_node,
7454 arg1)));
7455 tree ind2 = fold_convert_loc (loc, integer_type_node,
7456 build1 (INDIRECT_REF, cst_uchar_node,
7457 fold_convert_loc (loc,
7458 cst_uchar_ptr_node,
7459 arg2)));
7460 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
7461 }
7462
7463 return NULL_TREE;
7464 }
7465
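/* Illustrative sketch, not part of builtins.c: the length-driven strncmp
   cases above; the example_* helper name is a placeholder.  */
#include <assert.h>
#include <string.h>

static void
example_strncmp_folds (const char *a, const char *b)
{
  /* LEN == 0: nothing is compared, the result is 0.  */
  assert (strncmp (a, b, 0) == 0);

  /* LEN == 1: equivalent to the single-byte difference the fold builds.  */
  int diff = *(const unsigned char *) a - *(const unsigned char *) b;
  assert ((strncmp (a, b, 1) == 0) == (diff == 0));
  assert ((strncmp (a, b, 1) < 0) == (diff < 0));
}
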
7466 /* Fold a call to builtin isascii with argument ARG. */
7467
7468 static tree
7469 fold_builtin_isascii (location_t loc, tree arg)
7470 {
7471 if (!validate_arg (arg, INTEGER_TYPE))
7472 return NULL_TREE;
7473 else
7474 {
7475 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
7476 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
7477 build_int_cst (integer_type_node,
7478 ~ (unsigned HOST_WIDE_INT) 0x7f));
7479 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
7480 arg, integer_zero_node);
7481 }
7482 }
7483
7484 /* Fold a call to builtin toascii with argument ARG. */
7485
7486 static tree
7487 fold_builtin_toascii (location_t loc, tree arg)
7488 {
7489 if (!validate_arg (arg, INTEGER_TYPE))
7490 return NULL_TREE;
7491
7492 /* Transform toascii(c) -> (c & 0x7f). */
7493 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
7494 build_int_cst (integer_type_node, 0x7f));
7495 }
7496
7497 /* Fold a call to builtin isdigit with argument ARG. */
7498
7499 static tree
7500 fold_builtin_isdigit (location_t loc, tree arg)
7501 {
7502 if (!validate_arg (arg, INTEGER_TYPE))
7503 return NULL_TREE;
7504 else
7505 {
7506 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
7507 /* According to the C standard, isdigit is unaffected by locale.
7508 However, it definitely is affected by the target character set. */
7509 unsigned HOST_WIDE_INT target_digit0
7510 = lang_hooks.to_target_charset ('0');
7511
7512 if (target_digit0 == 0)
7513 return NULL_TREE;
7514
7515 arg = fold_convert_loc (loc, unsigned_type_node, arg);
7516 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
7517 build_int_cst (unsigned_type_node, target_digit0));
7518 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
7519 build_int_cst (unsigned_type_node, 9));
7520 }
7521 }
7522
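/* Illustrative sketch, not part of builtins.c: the three character-class
   transformations above, checked against the library over the unsigned
   char range.  Assumes an ASCII execution character set and a POSIX-style
   <ctype.h> that declares isascii and toascii; the example_* helper name
   is a placeholder.  */
#include <assert.h>
#include <ctype.h>

static void
example_char_class_folds (void)
{
  for (int c = 0; c < 256; c++)
    {
      /* isascii (c) -> ((c & ~0x7f) == 0).  */
      assert ((isascii (c) != 0) == ((c & ~0x7f) == 0));

      /* toascii (c) -> (c & 0x7f).  */
      assert (toascii (c) == (c & 0x7f));

      /* isdigit (c) -> (unsigned) c - '0' <= 9.  */
      assert ((isdigit (c) != 0) == ((unsigned) c - '0' <= 9));
    }
}
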
7523 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
7524
7525 static tree
7526 fold_builtin_fabs (location_t loc, tree arg, tree type)
7527 {
7528 if (!validate_arg (arg, REAL_TYPE))
7529 return NULL_TREE;
7530
7531 arg = fold_convert_loc (loc, type, arg);
7532 return fold_build1_loc (loc, ABS_EXPR, type, arg);
7533 }
7534
7535 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
7536
7537 static tree
7538 fold_builtin_abs (location_t loc, tree arg, tree type)
7539 {
7540 if (!validate_arg (arg, INTEGER_TYPE))
7541 return NULL_TREE;
7542
7543 arg = fold_convert_loc (loc, type, arg);
7544 return fold_build1_loc (loc, ABS_EXPR, type, arg);
7545 }
7546
7547 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
7548
7549 static tree
7550 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
7551 {
7552 /* ??? Only expand to FMA_EXPR if it's directly supported. */
7553 if (validate_arg (arg0, REAL_TYPE)
7554 && validate_arg (arg1, REAL_TYPE)
7555 && validate_arg (arg2, REAL_TYPE)
7556 && optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
7557 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
7558
7559 return NULL_TREE;
7560 }
7561
7562 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
7563
7564 static tree
7565 fold_builtin_carg (location_t loc, tree arg, tree type)
7566 {
7567 if (validate_arg (arg, COMPLEX_TYPE)
7568 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7569 {
7570 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
7571
7572 if (atan2_fn)
7573 {
7574 tree new_arg = builtin_save_expr (arg);
7575 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
7576 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
7577 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
7578 }
7579 }
7580
7581 return NULL_TREE;
7582 }
7583
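/* Illustrative sketch, not part of builtins.c: the identity used by the
   carg fold above, with a small tolerance since two correctly rounded
   implementations need not agree bit-for-bit; the example_* helper name is
   a placeholder.  */
#include <assert.h>
#include <complex.h>
#include <math.h>

static void
example_carg_fold (double complex z)
{
  /* carg (a + b*I) -> atan2 (b, a), i.e. atan2 (cimag (z), creal (z)).  */
  double via_fold = atan2 (cimag (z), creal (z));
  assert (fabs (carg (z) - via_fold) <= 1e-15 * (fabs (via_fold) + 1.0));
}
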
7584 /* Fold a call to builtin frexp; we can assume the base is 2. */
7585
7586 static tree
7587 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
7588 {
7589 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
7590 return NULL_TREE;
7591
7592 STRIP_NOPS (arg0);
7593
7594 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
7595 return NULL_TREE;
7596
7597 arg1 = build_fold_indirect_ref_loc (loc, arg1);
7598
7599 /* Proceed if a valid pointer type was passed in. */
7600 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
7601 {
7602 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
7603 tree frac, exp;
7604
7605 switch (value->cl)
7606 {
7607 case rvc_zero:
7608 /* For +-0, return (*exp = 0, +-0). */
7609 exp = integer_zero_node;
7610 frac = arg0;
7611 break;
7612 case rvc_nan:
7613 case rvc_inf:
7614 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
7615 return omit_one_operand_loc (loc, rettype, arg0, arg1);
7616 case rvc_normal:
7617 {
7618 /* Since the frexp function always expects base 2, and in
7619 GCC normalized significands are already in the range
7620 [0.5, 1.0), we have exactly what frexp wants. */
7621 REAL_VALUE_TYPE frac_rvt = *value;
7622 SET_REAL_EXP (&frac_rvt, 0);
7623 frac = build_real (rettype, frac_rvt);
7624 exp = build_int_cst (integer_type_node, REAL_EXP (value));
7625 }
7626 break;
7627 default:
7628 gcc_unreachable ();
7629 }
7630
7631 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
7632 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
7633 TREE_SIDE_EFFECTS (arg1) = 1;
7634 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
7635 }
7636
7637 return NULL_TREE;
7638 }
7639
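/* Illustrative sketch, not part of builtins.c: the constant cases folded
   above.  GCC keeps normalized significands in [0.5, 1.0), so a normal
   constant splits directly into fraction and exponent; zero keeps its sign
   and stores 0 through the exponent pointer.  The example_* helper name is
   a placeholder.  */
#include <assert.h>
#include <math.h>

static void
example_frexp_fold (void)
{
  int e;

  /* rvc_normal: 8.0 == 0.5 * 2^4.  */
  assert (frexp (8.0, &e) == 0.5 && e == 4);

  /* rvc_zero: the fraction is the (signed) zero itself and *exp is 0.  */
  assert (frexp (-0.0, &e) == 0.0 && e == 0);
  assert (signbit (frexp (-0.0, &e)));
}
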
7640 /* Fold a call to builtin modf. */
7641
7642 static tree
7643 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
7644 {
7645 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
7646 return NULL_TREE;
7647
7648 STRIP_NOPS (arg0);
7649
7650 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
7651 return NULL_TREE;
7652
7653 arg1 = build_fold_indirect_ref_loc (loc, arg1);
7654
7655 /* Proceed if a valid pointer type was passed in. */
7656 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
7657 {
7658 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
7659 REAL_VALUE_TYPE trunc, frac;
7660
7661 switch (value->cl)
7662 {
7663 case rvc_nan:
7664 case rvc_zero:
7665 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
7666 trunc = frac = *value;
7667 break;
7668 case rvc_inf:
7669 /* For +-Inf, return (*arg1 = arg0, +-0). */
7670 frac = dconst0;
7671 frac.sign = value->sign;
7672 trunc = *value;
7673 break;
7674 case rvc_normal:
7675 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
7676 real_trunc (&trunc, VOIDmode, value);
7677 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
7678 /* If the original number was negative and already
7679 integral, then the fractional part is -0.0. */
7680 if (value->sign && frac.cl == rvc_zero)
7681 frac.sign = value->sign;
7682 break;
7683 }
7684
7685 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
7686 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
7687 build_real (rettype, trunc));
7688 TREE_SIDE_EFFECTS (arg1) = 1;
7689 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
7690 build_real (rettype, frac));
7691 }
7692
7693 return NULL_TREE;
7694 }
7695
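/* Illustrative sketch, not part of builtins.c: the constant cases folded
   above, including the sign of a zero fraction; the example_* helper name
   is a placeholder.  */
#include <assert.h>
#include <math.h>

static void
example_modf_fold (void)
{
  double ipart;

  /* rvc_normal: (*arg1 = trunc (x), x - trunc (x)).  */
  assert (modf (2.75, &ipart) == 0.75 && ipart == 2.0);

  /* A negative value that is already integral keeps a -0.0 fraction,
     matching the frac.sign handling above.  */
  assert (modf (-3.0, &ipart) == 0.0 && ipart == -3.0);
  assert (signbit (modf (-3.0, &ipart)));
}
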
7696 /* Given a location LOC, an interclass builtin function decl FNDECL
7697 and its single argument ARG, return a folded expression computing
7698 the same, or NULL_TREE if we either couldn't or didn't want to fold
7699 (the latter happens if there's an RTL instruction available). */
7700
7701 static tree
7702 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
7703 {
7704 machine_mode mode;
7705
7706 if (!validate_arg (arg, REAL_TYPE))
7707 return NULL_TREE;
7708
7709 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
7710 return NULL_TREE;
7711
7712 mode = TYPE_MODE (TREE_TYPE (arg));
7713
7714 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
7715
7716 /* If there is no optab, try generic code. */
7717 switch (DECL_FUNCTION_CODE (fndecl))
7718 {
7719 tree result;
7720
7721 CASE_FLT_FN (BUILT_IN_ISINF):
7722 {
7723 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
7724 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
7725 tree type = TREE_TYPE (arg);
7726 REAL_VALUE_TYPE r;
7727 char buf[128];
7728
7729 if (is_ibm_extended)
7730 {
7731 /* NaN and Inf are encoded in the high-order double value
7732 only. The low-order value is not significant. */
7733 type = double_type_node;
7734 mode = DFmode;
7735 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
7736 }
7737 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
7738 real_from_string (&r, buf);
7739 result = build_call_expr (isgr_fn, 2,
7740 fold_build1_loc (loc, ABS_EXPR, type, arg),
7741 build_real (type, r));
7742 return result;
7743 }
7744 CASE_FLT_FN (BUILT_IN_FINITE):
7745 case BUILT_IN_ISFINITE:
7746 {
7747 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
7748 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
7749 tree type = TREE_TYPE (arg);
7750 REAL_VALUE_TYPE r;
7751 char buf[128];
7752
7753 if (is_ibm_extended)
7754 {
7755 /* NaN and Inf are encoded in the high-order double value
7756 only. The low-order value is not significant. */
7757 type = double_type_node;
7758 mode = DFmode;
7759 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
7760 }
7761 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
7762 real_from_string (&r, buf);
7763 result = build_call_expr (isle_fn, 2,
7764 fold_build1_loc (loc, ABS_EXPR, type, arg),
7765 build_real (type, r));
7766 /*result = fold_build2_loc (loc, UNGT_EXPR,
7767 TREE_TYPE (TREE_TYPE (fndecl)),
7768 fold_build1_loc (loc, ABS_EXPR, type, arg),
7769 build_real (type, r));
7770 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
7771 TREE_TYPE (TREE_TYPE (fndecl)),
7772 result);*/
7773 return result;
7774 }
7775 case BUILT_IN_ISNORMAL:
7776 {
7777 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
7778 islessequal(fabs(x),DBL_MAX). */
7779 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
7780 tree type = TREE_TYPE (arg);
7781 tree orig_arg, max_exp, min_exp;
7782 machine_mode orig_mode = mode;
7783 REAL_VALUE_TYPE rmax, rmin;
7784 char buf[128];
7785
7786 orig_arg = arg = builtin_save_expr (arg);
7787 if (is_ibm_extended)
7788 {
7789 /* Use double to test the normal range of IBM extended
7790 precision. Emin for IBM extended precision is
7791 different to emin for IEEE double, being 53 higher
7792 since the low double exponent is at least 53 lower
7793 than the high double exponent. */
7794 type = double_type_node;
7795 mode = DFmode;
7796 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
7797 }
7798 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
7799
7800 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
7801 real_from_string (&rmax, buf);
7802 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
7803 real_from_string (&rmin, buf);
7804 max_exp = build_real (type, rmax);
7805 min_exp = build_real (type, rmin);
7806
7807 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
7808 if (is_ibm_extended)
7809 {
7810 /* Testing the high end of the range is done just using
7811 the high double, using the same test as isfinite().
7812 For the subnormal end of the range we first test the
7813 high double, then if its magnitude is equal to the
7814 limit of 0x1p-969, we test whether the low double is
7815 non-zero and opposite sign to the high double. */
7816 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
7817 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
7818 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
7819 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
7820 arg, min_exp);
7821 tree as_complex = build1 (VIEW_CONVERT_EXPR,
7822 complex_double_type_node, orig_arg);
7823 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
7824 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
7825 tree zero = build_real (type, dconst0);
7826 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
7827 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
7828 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
7829 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
7830 fold_build3 (COND_EXPR,
7831 integer_type_node,
7832 hilt, logt, lolt));
7833 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
7834 eq_min, ok_lo);
7835 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
7836 gt_min, eq_min);
7837 }
7838 else
7839 {
7840 tree const isge_fn
7841 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
7842 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
7843 }
7844 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
7845 max_exp, min_exp);
7846 return result;
7847 }
7848 default:
7849 break;
7850 }
7851
7852 return NULL_TREE;
7853 }
7854
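/* Illustrative sketch, not part of builtins.c: the generic expansions the
   function above falls back to, written out for double.  DBL_MAX stands in
   for the get_max_float constant and DBL_MIN for the "0x1p<emin-1>"
   smallest-normal bound; the example_* helper name is a placeholder.  */
#include <assert.h>
#include <float.h>
#include <math.h>

static void
example_interclass_expansions (double x)
{
  /* isinf (x)    -> isgreater (fabs (x), DBL_MAX).  */
  assert ((isinf (x) != 0) == (isgreater (fabs (x), DBL_MAX) != 0));

  /* isfinite (x) -> islessequal (fabs (x), DBL_MAX).  */
  assert ((isfinite (x) != 0) == (islessequal (fabs (x), DBL_MAX) != 0));

  /* isnormal (x) -> isgreaterequal (fabs (x), DBL_MIN)
                     & islessequal (fabs (x), DBL_MAX).  */
  assert ((isnormal (x) != 0)
          == (isgreaterequal (fabs (x), DBL_MIN)
              & islessequal (fabs (x), DBL_MAX)));
}
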
7855 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
7856 ARG is the argument for the call. */
7857
7858 static tree
7859 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
7860 {
7861 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7862
7863 if (!validate_arg (arg, REAL_TYPE))
7864 return NULL_TREE;
7865
7866 switch (builtin_index)
7867 {
7868 case BUILT_IN_ISINF:
7869 if (!HONOR_INFINITIES (arg))
7870 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
7871
7872 return NULL_TREE;
7873
7874 case BUILT_IN_ISINF_SIGN:
7875 {
7876 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
7877 /* In a boolean context, GCC will fold the inner COND_EXPR to
7878 1. So e.g. "if (isinf_sign(x))" would be folded to just
7879 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
7880 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
7881 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
7882 tree tmp = NULL_TREE;
7883
7884 arg = builtin_save_expr (arg);
7885
7886 if (signbit_fn && isinf_fn)
7887 {
7888 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
7889 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
7890
7891 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
7892 signbit_call, integer_zero_node);
7893 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
7894 isinf_call, integer_zero_node);
7895
7896 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
7897 integer_minus_one_node, integer_one_node);
7898 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
7899 isinf_call, tmp,
7900 integer_zero_node);
7901 }
7902
7903 return tmp;
7904 }
7905
7906 case BUILT_IN_ISFINITE:
7907 if (!HONOR_NANS (arg)
7908 && !HONOR_INFINITIES (arg))
7909 return omit_one_operand_loc (loc, type, integer_one_node, arg);
7910
7911 return NULL_TREE;
7912
7913 case BUILT_IN_ISNAN:
7914 if (!HONOR_NANS (arg))
7915 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
7916
7917 {
7918 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
7919 if (is_ibm_extended)
7920 {
7921 /* NaN and Inf are encoded in the high-order double value
7922 only. The low-order value is not significant. */
7923 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
7924 }
7925 }
7926 arg = builtin_save_expr (arg);
7927 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
7928
7929 default:
7930 gcc_unreachable ();
7931 }
7932 }
7933
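/* Illustrative sketch, not part of builtins.c: the isinf_sign expansion
   built above, written as plain C; the example_* helper name is a
   placeholder.  */
#include <math.h>

static int
example_isinf_sign_expansion (double x)
{
  /* isinf_sign (x) -> isinf (x) ? (signbit (x) ? -1 : 1) : 0, i.e. the
     pair of COND_EXPRs constructed above.  */
  return isinf (x) ? (signbit (x) ? -1 : 1) : 0;
}
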
7934 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
7935 This builtin will generate code to return the appropriate floating
7936 point classification depending on the value of the floating point
7937 number passed in. The possible return values must be supplied as
7938 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
7939 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
7940 one floating point argument which is "type generic". */
7941
7942 static tree
7943 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
7944 {
7945 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
7946 arg, type, res, tmp;
7947 machine_mode mode;
7948 REAL_VALUE_TYPE r;
7949 char buf[128];
7950
7951 /* Verify the required arguments in the original call. */
7952 if (nargs != 6
7953 || !validate_arg (args[0], INTEGER_TYPE)
7954 || !validate_arg (args[1], INTEGER_TYPE)
7955 || !validate_arg (args[2], INTEGER_TYPE)
7956 || !validate_arg (args[3], INTEGER_TYPE)
7957 || !validate_arg (args[4], INTEGER_TYPE)
7958 || !validate_arg (args[5], REAL_TYPE))
7959 return NULL_TREE;
7960
7961 fp_nan = args[0];
7962 fp_infinite = args[1];
7963 fp_normal = args[2];
7964 fp_subnormal = args[3];
7965 fp_zero = args[4];
7966 arg = args[5];
7967 type = TREE_TYPE (arg);
7968 mode = TYPE_MODE (type);
7969 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
7970
7971 /* fpclassify(x) ->
7972 isnan(x) ? FP_NAN :
7973 (fabs(x) == Inf ? FP_INFINITE :
7974 (fabs(x) >= DBL_MIN ? FP_NORMAL :
7975 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
7976
7977 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
7978 build_real (type, dconst0));
7979 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
7980 tmp, fp_zero, fp_subnormal);
7981
7982 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
7983 real_from_string (&r, buf);
7984 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
7985 arg, build_real (type, r));
7986 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
7987
7988 if (HONOR_INFINITIES (mode))
7989 {
7990 real_inf (&r);
7991 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
7992 build_real (type, r));
7993 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
7994 fp_infinite, res);
7995 }
7996
7997 if (HONOR_NANS (mode))
7998 {
7999 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
8000 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
8001 }
8002
8003 return res;
8004 }
8005
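/* Illustrative sketch, not part of builtins.c: the nested conditional the
   function above builds, written as plain C for double with the standard
   FP_* macros standing in for the user-supplied arguments; the example_*
   helper name is a placeholder.  */
#include <float.h>
#include <math.h>

static int
example_fpclassify_expansion (double x)
{
  double ax = fabs (x);

  return x != x            ? FP_NAN
         : ax == INFINITY  ? FP_INFINITE
         : ax >= DBL_MIN   ? FP_NORMAL
         : ax == 0.0       ? FP_ZERO
                           : FP_SUBNORMAL;
}
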
8006 /* Fold a call to an unordered comparison function such as
8007 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
8008 being called and ARG0 and ARG1 are the arguments for the call.
8009 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
8010 the opposite of the desired result. UNORDERED_CODE is used
8011 for modes that can hold NaNs and ORDERED_CODE is used for
8012 the rest. */
8013
8014 static tree
8015 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
8016 enum tree_code unordered_code,
8017 enum tree_code ordered_code)
8018 {
8019 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8020 enum tree_code code;
8021 tree type0, type1;
8022 enum tree_code code0, code1;
8023 tree cmp_type = NULL_TREE;
8024
8025 type0 = TREE_TYPE (arg0);
8026 type1 = TREE_TYPE (arg1);
8027
8028 code0 = TREE_CODE (type0);
8029 code1 = TREE_CODE (type1);
8030
8031 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
8032 /* Choose the wider of two real types. */
8033 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
8034 ? type0 : type1;
8035 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
8036 cmp_type = type0;
8037 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
8038 cmp_type = type1;
8039
8040 arg0 = fold_convert_loc (loc, cmp_type, arg0);
8041 arg1 = fold_convert_loc (loc, cmp_type, arg1);
8042
8043 if (unordered_code == UNORDERED_EXPR)
8044 {
8045 if (!HONOR_NANS (arg0))
8046 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
8047 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
8048 }
8049
8050 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
8051 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
8052 fold_build2_loc (loc, code, type, arg0, arg1));
8053 }
8054
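/* Illustrative sketch, not part of builtins.c: why the inverse comparison
   codes are passed above.  There is no direct quiet "greater" tree code,
   so isgreater (x, y) is built as the negation of the unordered-or-less-
   equal form; the example_* helper name is a placeholder.  */
#include <assert.h>
#include <math.h>

static void
example_isgreater_fold (double x, double y)
{
  /* isgreater (x, y) -> !UNLE (x, y) == !(isunordered (x, y) || x <= y).  */
  assert ((isgreater (x, y) != 0)
          == !(isunordered (x, y) || islessequal (x, y)));
}
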
8055 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
8056 arithmetic if it can never overflow, or into internal functions that
8057 return both the result of the arithmetic and an overflow flag in
8058 a complex integer result, or some other check for overflow.
8059 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
8060 checking part of that. */
8061
8062 static tree
8063 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
8064 tree arg0, tree arg1, tree arg2)
8065 {
8066 enum internal_fn ifn = IFN_LAST;
8067 /* The code of the expression corresponding to the type-generic
8068 built-in, or ERROR_MARK for the type-specific ones. */
8069 enum tree_code opcode = ERROR_MARK;
8070 bool ovf_only = false;
8071
8072 switch (fcode)
8073 {
8074 case BUILT_IN_ADD_OVERFLOW_P:
8075 ovf_only = true;
8076 /* FALLTHRU */
8077 case BUILT_IN_ADD_OVERFLOW:
8078 opcode = PLUS_EXPR;
8079 /* FALLTHRU */
8080 case BUILT_IN_SADD_OVERFLOW:
8081 case BUILT_IN_SADDL_OVERFLOW:
8082 case BUILT_IN_SADDLL_OVERFLOW:
8083 case BUILT_IN_UADD_OVERFLOW:
8084 case BUILT_IN_UADDL_OVERFLOW:
8085 case BUILT_IN_UADDLL_OVERFLOW:
8086 ifn = IFN_ADD_OVERFLOW;
8087 break;
8088 case BUILT_IN_SUB_OVERFLOW_P:
8089 ovf_only = true;
8090 /* FALLTHRU */
8091 case BUILT_IN_SUB_OVERFLOW:
8092 opcode = MINUS_EXPR;
8093 /* FALLTHRU */
8094 case BUILT_IN_SSUB_OVERFLOW:
8095 case BUILT_IN_SSUBL_OVERFLOW:
8096 case BUILT_IN_SSUBLL_OVERFLOW:
8097 case BUILT_IN_USUB_OVERFLOW:
8098 case BUILT_IN_USUBL_OVERFLOW:
8099 case BUILT_IN_USUBLL_OVERFLOW:
8100 ifn = IFN_SUB_OVERFLOW;
8101 break;
8102 case BUILT_IN_MUL_OVERFLOW_P:
8103 ovf_only = true;
8104 /* FALLTHRU */
8105 case BUILT_IN_MUL_OVERFLOW:
8106 opcode = MULT_EXPR;
8107 /* FALLTHRU */
8108 case BUILT_IN_SMUL_OVERFLOW:
8109 case BUILT_IN_SMULL_OVERFLOW:
8110 case BUILT_IN_SMULLL_OVERFLOW:
8111 case BUILT_IN_UMUL_OVERFLOW:
8112 case BUILT_IN_UMULL_OVERFLOW:
8113 case BUILT_IN_UMULLL_OVERFLOW:
8114 ifn = IFN_MUL_OVERFLOW;
8115 break;
8116 default:
8117 gcc_unreachable ();
8118 }
8119
8120 /* For the "generic" overloads, the first two arguments can have different
8121 types and the last argument determines the target type to use to check
8122 for overflow. The arguments of the other overloads all have the same
8123 type. */
8124 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
8125
8126 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
8127 arguments are constant, attempt to fold the built-in call into a constant
8128 expression indicating whether or not it detected an overflow. */
8129 if (ovf_only
8130 && TREE_CODE (arg0) == INTEGER_CST
8131 && TREE_CODE (arg1) == INTEGER_CST)
8132 /* Perform the computation in the target type and check for overflow. */
8133 return omit_one_operand_loc (loc, boolean_type_node,
8134 arith_overflowed_p (opcode, type, arg0, arg1)
8135 ? boolean_true_node : boolean_false_node,
8136 arg2);
8137
8138 tree ctype = build_complex_type (type);
8139 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
8140 2, arg0, arg1);
8141 tree tgt = save_expr (call);
8142 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
8143 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
8144 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
8145
8146 if (ovf_only)
8147 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
8148
8149 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
8150 tree store
8151 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
8152 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
8153 }
8154
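/* Illustrative sketch, not part of builtins.c: the two flavours handled
   above, at the source level.  The _p form only yields the overflow flag
   (its third argument merely supplies the type to check against), while
   the plain form also stores the wrapped result, mirroring the
   REALPART/IMAGPART split of the internal-function call built above.  The
   example_* helper name is a placeholder.  */
#include <assert.h>
#include <limits.h>

static void
example_overflow_folds (void)
{
  int res;

  /* Constant operands: __builtin_add_overflow_p folds to a constant.  */
  assert (__builtin_add_overflow_p (INT_MAX, 1, (int) 0));
  assert (!__builtin_add_overflow_p (1, 2, (int) 0));

  /* The non-_p form stores the wrapped result and returns the flag.  */
  assert (__builtin_add_overflow (INT_MAX, 1, &res) && res == INT_MIN);
  assert (!__builtin_add_overflow (1, 2, &res) && res == 3);
}
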
8155 /* Fold a call to __builtin_FILE to a constant string. */
8156
8157 static inline tree
8158 fold_builtin_FILE (location_t loc)
8159 {
8160 if (const char *fname = LOCATION_FILE (loc))
8161 return build_string_literal (strlen (fname) + 1, fname);
8162
8163 return build_string_literal (1, "");
8164 }
8165
8166 /* Fold a call to __builtin_FUNCTION to a constant string. */
8167
8168 static inline tree
8169 fold_builtin_FUNCTION ()
8170 {
8171 if (current_function_decl)
8172 {
8173 const char *name = IDENTIFIER_POINTER (DECL_NAME (current_function_decl));
8174 return build_string_literal (strlen (name) + 1, name);
8175 }
8176
8177 return build_string_literal (1, "");
8178 }
8179
8180 /* Fold a call to __builtin_LINE to an integer constant. */
8181
8182 static inline tree
8183 fold_builtin_LINE (location_t loc, tree type)
8184 {
8185 return build_int_cst (type, LOCATION_LINE (loc));
8186 }
8187
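/* Illustrative sketch, not part of builtins.c: each of the three folds
   above turns the call into a constant describing the location of the
   call itself, much like __FILE__, __LINE__ and __func__ but usable in
   default arguments; the example_* helper name is a placeholder.  */
#include <stdio.h>

static void
example_source_location_builtins (void)
{
  printf ("%s:%d in %s\n",
          __builtin_FILE (), __builtin_LINE (), __builtin_FUNCTION ());
}
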
8188 /* Fold a call to built-in function FNDECL with 0 arguments.
8189 This function returns NULL_TREE if no simplification was possible. */
8190
8191 static tree
8192 fold_builtin_0 (location_t loc, tree fndecl)
8193 {
8194 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8195 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8196 switch (fcode)
8197 {
8198 case BUILT_IN_FILE:
8199 return fold_builtin_FILE (loc);
8200
8201 case BUILT_IN_FUNCTION:
8202 return fold_builtin_FUNCTION ();
8203
8204 case BUILT_IN_LINE:
8205 return fold_builtin_LINE (loc, type);
8206
8207 CASE_FLT_FN (BUILT_IN_INF):
8208 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
8209 case BUILT_IN_INFD32:
8210 case BUILT_IN_INFD64:
8211 case BUILT_IN_INFD128:
8212 return fold_builtin_inf (loc, type, true);
8213
8214 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
8215 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
8216 return fold_builtin_inf (loc, type, false);
8217
8218 case BUILT_IN_CLASSIFY_TYPE:
8219 return fold_builtin_classify_type (NULL_TREE);
8220
8221 default:
8222 break;
8223 }
8224 return NULL_TREE;
8225 }
8226
8227 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
8228 This function returns NULL_TREE if no simplification was possible. */
8229
8230 static tree
8231 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
8232 {
8233 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8234 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8235
8236 if (TREE_CODE (arg0) == ERROR_MARK)
8237 return NULL_TREE;
8238
8239 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
8240 return ret;
8241
8242 switch (fcode)
8243 {
8244 case BUILT_IN_CONSTANT_P:
8245 {
8246 tree val = fold_builtin_constant_p (arg0);
8247
8248 /* Gimplification will pull the CALL_EXPR for the builtin out of
8249 an if condition. When not optimizing, we'll not CSE it back.
8250 To avoid regressions such as link errors, return false now. */
8251 if (!val && !optimize)
8252 val = integer_zero_node;
8253
8254 return val;
8255 }
8256
8257 case BUILT_IN_CLASSIFY_TYPE:
8258 return fold_builtin_classify_type (arg0);
8259
8260 case BUILT_IN_STRLEN:
8261 return fold_builtin_strlen (loc, type, arg0);
8262
8263 CASE_FLT_FN (BUILT_IN_FABS):
8264 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
8265 case BUILT_IN_FABSD32:
8266 case BUILT_IN_FABSD64:
8267 case BUILT_IN_FABSD128:
8268 return fold_builtin_fabs (loc, arg0, type);
8269
8270 case BUILT_IN_ABS:
8271 case BUILT_IN_LABS:
8272 case BUILT_IN_LLABS:
8273 case BUILT_IN_IMAXABS:
8274 return fold_builtin_abs (loc, arg0, type);
8275
8276 CASE_FLT_FN (BUILT_IN_CONJ):
8277 if (validate_arg (arg0, COMPLEX_TYPE)
8278 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8279 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
8280 break;
8281
8282 CASE_FLT_FN (BUILT_IN_CREAL):
8283 if (validate_arg (arg0, COMPLEX_TYPE)
8284 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8285 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
8286 break;
8287
8288 CASE_FLT_FN (BUILT_IN_CIMAG):
8289 if (validate_arg (arg0, COMPLEX_TYPE)
8290 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8291 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
8292 break;
8293
8294 CASE_FLT_FN (BUILT_IN_CARG):
8295 return fold_builtin_carg (loc, arg0, type);
8296
8297 case BUILT_IN_ISASCII:
8298 return fold_builtin_isascii (loc, arg0);
8299
8300 case BUILT_IN_TOASCII:
8301 return fold_builtin_toascii (loc, arg0);
8302
8303 case BUILT_IN_ISDIGIT:
8304 return fold_builtin_isdigit (loc, arg0);
8305
8306 CASE_FLT_FN (BUILT_IN_FINITE):
8307 case BUILT_IN_FINITED32:
8308 case BUILT_IN_FINITED64:
8309 case BUILT_IN_FINITED128:
8310 case BUILT_IN_ISFINITE:
8311 {
8312 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
8313 if (ret)
8314 return ret;
8315 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8316 }
8317
8318 CASE_FLT_FN (BUILT_IN_ISINF):
8319 case BUILT_IN_ISINFD32:
8320 case BUILT_IN_ISINFD64:
8321 case BUILT_IN_ISINFD128:
8322 {
8323 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
8324 if (ret)
8325 return ret;
8326 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8327 }
8328
8329 case BUILT_IN_ISNORMAL:
8330 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8331
8332 case BUILT_IN_ISINF_SIGN:
8333 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
8334
8335 CASE_FLT_FN (BUILT_IN_ISNAN):
8336 case BUILT_IN_ISNAND32:
8337 case BUILT_IN_ISNAND64:
8338 case BUILT_IN_ISNAND128:
8339 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
8340
8341 case BUILT_IN_FREE:
8342 if (integer_zerop (arg0))
8343 return build_empty_stmt (loc);
8344 break;
8345
8346 default:
8347 break;
8348 }
8349
8350 return NULL_TREE;
8351
8352 }
8353
8354 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
8355 This function returns NULL_TREE if no simplification was possible. */
8356
8357 static tree
8358 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
8359 {
8360 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8361 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8362
8363 if (TREE_CODE (arg0) == ERROR_MARK
8364 || TREE_CODE (arg1) == ERROR_MARK)
8365 return NULL_TREE;
8366
8367 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
8368 return ret;
8369
8370 switch (fcode)
8371 {
8372 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
8373 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
8374 if (validate_arg (arg0, REAL_TYPE)
8375 && validate_arg (arg1, POINTER_TYPE))
8376 return do_mpfr_lgamma_r (arg0, arg1, type);
8377 break;
8378
8379 CASE_FLT_FN (BUILT_IN_FREXP):
8380 return fold_builtin_frexp (loc, arg0, arg1, type);
8381
8382 CASE_FLT_FN (BUILT_IN_MODF):
8383 return fold_builtin_modf (loc, arg0, arg1, type);
8384
8385 case BUILT_IN_STRSTR:
8386 return fold_builtin_strstr (loc, arg0, arg1, type);
8387
8388 case BUILT_IN_STRSPN:
8389 return fold_builtin_strspn (loc, arg0, arg1);
8390
8391 case BUILT_IN_STRCSPN:
8392 return fold_builtin_strcspn (loc, arg0, arg1);
8393
8394 case BUILT_IN_STRCHR:
8395 case BUILT_IN_INDEX:
8396 return fold_builtin_strchr (loc, arg0, arg1, type);
8397
8398 case BUILT_IN_STRRCHR:
8399 case BUILT_IN_RINDEX:
8400 return fold_builtin_strrchr (loc, arg0, arg1, type);
8401
8402 case BUILT_IN_STRCMP:
8403 return fold_builtin_strcmp (loc, arg0, arg1);
8404
8405 case BUILT_IN_STRPBRK:
8406 return fold_builtin_strpbrk (loc, arg0, arg1, type);
8407
8408 case BUILT_IN_EXPECT:
8409 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
8410
8411 case BUILT_IN_ISGREATER:
8412 return fold_builtin_unordered_cmp (loc, fndecl,
8413 arg0, arg1, UNLE_EXPR, LE_EXPR);
8414 case BUILT_IN_ISGREATEREQUAL:
8415 return fold_builtin_unordered_cmp (loc, fndecl,
8416 arg0, arg1, UNLT_EXPR, LT_EXPR);
8417 case BUILT_IN_ISLESS:
8418 return fold_builtin_unordered_cmp (loc, fndecl,
8419 arg0, arg1, UNGE_EXPR, GE_EXPR);
8420 case BUILT_IN_ISLESSEQUAL:
8421 return fold_builtin_unordered_cmp (loc, fndecl,
8422 arg0, arg1, UNGT_EXPR, GT_EXPR);
8423 case BUILT_IN_ISLESSGREATER:
8424 return fold_builtin_unordered_cmp (loc, fndecl,
8425 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
8426 case BUILT_IN_ISUNORDERED:
8427 return fold_builtin_unordered_cmp (loc, fndecl,
8428 arg0, arg1, UNORDERED_EXPR,
8429 NOP_EXPR);
8430
8431 /* We do the folding for va_start in the expander. */
8432 case BUILT_IN_VA_START:
8433 break;
8434
8435 case BUILT_IN_OBJECT_SIZE:
8436 return fold_builtin_object_size (arg0, arg1);
8437
8438 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
8439 return fold_builtin_atomic_always_lock_free (arg0, arg1);
8440
8441 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
8442 return fold_builtin_atomic_is_lock_free (arg0, arg1);
8443
8444 default:
8445 break;
8446 }
8447 return NULL_TREE;
8448 }
8449
8450 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
8451 and ARG2.
8452 This function returns NULL_TREE if no simplification was possible. */
8453
8454 static tree
8455 fold_builtin_3 (location_t loc, tree fndecl,
8456 tree arg0, tree arg1, tree arg2)
8457 {
8458 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8459 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8460
8461 if (TREE_CODE (arg0) == ERROR_MARK
8462 || TREE_CODE (arg1) == ERROR_MARK
8463 || TREE_CODE (arg2) == ERROR_MARK)
8464 return NULL_TREE;
8465
8466 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
8467 arg0, arg1, arg2))
8468 return ret;
8469
8470 switch (fcode)
8471 {
8472
8473 CASE_FLT_FN (BUILT_IN_SINCOS):
8474 return fold_builtin_sincos (loc, arg0, arg1, arg2);
8475
8476 CASE_FLT_FN (BUILT_IN_FMA):
8477 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
8478
8479 CASE_FLT_FN (BUILT_IN_REMQUO):
8480 if (validate_arg (arg0, REAL_TYPE)
8481 && validate_arg (arg1, REAL_TYPE)
8482 && validate_arg (arg2, POINTER_TYPE))
8483 return do_mpfr_remquo (arg0, arg1, arg2);
8484 break;
8485
8486 case BUILT_IN_STRNCMP:
8487 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
8488
8489 case BUILT_IN_MEMCHR:
8490 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
8491
8492 case BUILT_IN_BCMP:
8493 case BUILT_IN_MEMCMP:
8494 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
8495
8496 case BUILT_IN_EXPECT:
8497 return fold_builtin_expect (loc, arg0, arg1, arg2);
8498
8499 case BUILT_IN_ADD_OVERFLOW:
8500 case BUILT_IN_SUB_OVERFLOW:
8501 case BUILT_IN_MUL_OVERFLOW:
8502 case BUILT_IN_ADD_OVERFLOW_P:
8503 case BUILT_IN_SUB_OVERFLOW_P:
8504 case BUILT_IN_MUL_OVERFLOW_P:
8505 case BUILT_IN_SADD_OVERFLOW:
8506 case BUILT_IN_SADDL_OVERFLOW:
8507 case BUILT_IN_SADDLL_OVERFLOW:
8508 case BUILT_IN_SSUB_OVERFLOW:
8509 case BUILT_IN_SSUBL_OVERFLOW:
8510 case BUILT_IN_SSUBLL_OVERFLOW:
8511 case BUILT_IN_SMUL_OVERFLOW:
8512 case BUILT_IN_SMULL_OVERFLOW:
8513 case BUILT_IN_SMULLL_OVERFLOW:
8514 case BUILT_IN_UADD_OVERFLOW:
8515 case BUILT_IN_UADDL_OVERFLOW:
8516 case BUILT_IN_UADDLL_OVERFLOW:
8517 case BUILT_IN_USUB_OVERFLOW:
8518 case BUILT_IN_USUBL_OVERFLOW:
8519 case BUILT_IN_USUBLL_OVERFLOW:
8520 case BUILT_IN_UMUL_OVERFLOW:
8521 case BUILT_IN_UMULL_OVERFLOW:
8522 case BUILT_IN_UMULLL_OVERFLOW:
8523 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
8524
8525 default:
8526 break;
8527 }
8528 return NULL_TREE;
8529 }
8530
8531 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
8532 arguments. IGNORE is true if the result of the
8533 function call is ignored. This function returns NULL_TREE if no
8534 simplification was possible. */
8535
8536 tree
8537 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
8538 {
8539 tree ret = NULL_TREE;
8540
8541 switch (nargs)
8542 {
8543 case 0:
8544 ret = fold_builtin_0 (loc, fndecl);
8545 break;
8546 case 1:
8547 ret = fold_builtin_1 (loc, fndecl, args[0]);
8548 break;
8549 case 2:
8550 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
8551 break;
8552 case 3:
8553 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
8554 break;
8555 default:
8556 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
8557 break;
8558 }
8559 if (ret)
8560 {
8561 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
8562 SET_EXPR_LOCATION (ret, loc);
8563 TREE_NO_WARNING (ret) = 1;
8564 return ret;
8565 }
8566 return NULL_TREE;
8567 }
8568
8569 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
8570 list ARGS along with N new arguments in NEWARGS. SKIP is the number
8571 of arguments in ARGS to be omitted. OLDNARGS is the number of
8572 elements in ARGS. */
8573
8574 static tree
8575 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
8576 int skip, tree fndecl, int n, va_list newargs)
8577 {
8578 int nargs = oldnargs - skip + n;
8579 tree *buffer;
8580
8581 if (n > 0)
8582 {
8583 int i, j;
8584
8585 buffer = XALLOCAVEC (tree, nargs);
8586 for (i = 0; i < n; i++)
8587 buffer[i] = va_arg (newargs, tree);
8588 for (j = skip; j < oldnargs; j++, i++)
8589 buffer[i] = args[j];
8590 }
8591 else
8592 buffer = args + skip;
8593
8594 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
8595 }
8596
8597 /* Return true if FNDECL shouldn't be folded right now.
8598    If a built-in function has an always_inline attribute wrapper,
8599    defer folding it until after always_inline functions have been
8600    inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
8601 might not be performed. */
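/* Illustrative sketch (not the exact glibc definition): with
   -D_FORTIFY_SOURCE a header may provide an always_inline wrapper such as

     extern __inline __attribute__ ((__always_inline__)) void *
     memcpy (void *d, const void *s, size_t n)
     {
       return __builtin___memcpy_chk (d, s, n, __builtin_object_size (d, 0));
     }

   Folding calls to the builtin before that wrapper has been inlined would
   bypass the object-size check, hence the deferral implemented below.  */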
8602
8603 bool
8604 avoid_folding_inline_builtin (tree fndecl)
8605 {
8606 return (DECL_DECLARED_INLINE_P (fndecl)
8607 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
8608 && cfun
8609 && !cfun->always_inline_functions_inlined
8610 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
8611 }
8612
8613 /* A wrapper function for builtin folding that prevents warnings for
8614 "statement without effect" and the like, caused by removing the
8615 call node earlier than the warning is generated. */
8616
8617 tree
8618 fold_call_expr (location_t loc, tree exp, bool ignore)
8619 {
8620 tree ret = NULL_TREE;
8621 tree fndecl = get_callee_fndecl (exp);
8622 if (fndecl
8623 && TREE_CODE (fndecl) == FUNCTION_DECL
8624 && DECL_BUILT_IN (fndecl)
8625 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
8626 yet. Defer folding until we see all the arguments
8627 (after inlining). */
8628 && !CALL_EXPR_VA_ARG_PACK (exp))
8629 {
8630 int nargs = call_expr_nargs (exp);
8631
8632 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
8633      instead the last argument is __builtin_va_arg_pack ().  Defer folding
8634 even in that case, until arguments are finalized. */
8635 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
8636 {
8637 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
8638 if (fndecl2
8639 && TREE_CODE (fndecl2) == FUNCTION_DECL
8640 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
8641 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
8642 return NULL_TREE;
8643 }
8644
8645 if (avoid_folding_inline_builtin (fndecl))
8646 return NULL_TREE;
8647
8648 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
8649 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
8650 CALL_EXPR_ARGP (exp), ignore);
8651 else
8652 {
8653 tree *args = CALL_EXPR_ARGP (exp);
8654 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
8655 if (ret)
8656 return ret;
8657 }
8658 }
8659 return NULL_TREE;
8660 }
8661
8662 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
8663 N arguments are passed in the array ARGARRAY. Return a folded
8664 expression or NULL_TREE if no simplification was possible. */
8665
8666 tree
8667 fold_builtin_call_array (location_t loc, tree,
8668 tree fn,
8669 int n,
8670 tree *argarray)
8671 {
8672 if (TREE_CODE (fn) != ADDR_EXPR)
8673 return NULL_TREE;
8674
8675 tree fndecl = TREE_OPERAND (fn, 0);
8676 if (TREE_CODE (fndecl) == FUNCTION_DECL
8677 && DECL_BUILT_IN (fndecl))
8678 {
8679       /* If the last argument is __builtin_va_arg_pack (), arguments to this
8680 function are not finalized yet. Defer folding until they are. */
8681 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
8682 {
8683 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
8684 if (fndecl2
8685 && TREE_CODE (fndecl2) == FUNCTION_DECL
8686 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
8687 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
8688 return NULL_TREE;
8689 }
8690 if (avoid_folding_inline_builtin (fndecl))
8691 return NULL_TREE;
8692 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
8693 return targetm.fold_builtin (fndecl, n, argarray, false);
8694 else
8695 return fold_builtin_n (loc, fndecl, argarray, n, false);
8696 }
8697
8698 return NULL_TREE;
8699 }
8700
8701 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
8702 along with N new arguments specified as the "..." parameters. SKIP
8703 is the number of arguments in EXP to be omitted. This function is used
8704 to do varargs-to-varargs transformations. */
8705
8706 static tree
8707 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
8708 {
8709 va_list ap;
8710 tree t;
8711
8712 va_start (ap, n);
8713 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
8714 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
8715 va_end (ap);
8716
8717 return t;
8718 }
8719
8720 /* Validate a single argument ARG against a tree code CODE representing
8721 a type. */
8722
8723 static bool
8724 validate_arg (const_tree arg, enum tree_code code)
8725 {
8726 if (!arg)
8727 return false;
8728 else if (code == POINTER_TYPE)
8729 return POINTER_TYPE_P (TREE_TYPE (arg));
8730 else if (code == INTEGER_TYPE)
8731 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
8732 return code == TREE_CODE (TREE_TYPE (arg));
8733 }
8734
8735 /* This function validates the types of a function call argument list
8736 against a specified list of tree_codes. If the last specifier is a 0,
8737    that represents an ellipsis, otherwise the last specifier must be a
8738 VOID_TYPE.
8739
8740 This is the GIMPLE version of validate_arglist. Eventually we want to
8741 completely convert builtins.c to work from GIMPLEs and the tree based
8742 validate_arglist will then be removed. */
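/* Hypothetical usage sketch: a builtin taking a pointer and an integer
   would be checked with

     if (!validate_gimple_arglist (call, POINTER_TYPE, INTEGER_TYPE,
				   VOID_TYPE))
       return false;

   where the trailing VOID_TYPE marks the end of the expected argument
   list, and a trailing 0 would instead allow any further arguments.  */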
8743
8744 bool
8745 validate_gimple_arglist (const gcall *call, ...)
8746 {
8747 enum tree_code code;
8748   bool res = false;
8749 va_list ap;
8750 const_tree arg;
8751 size_t i;
8752
8753 va_start (ap, call);
8754 i = 0;
8755
8756 do
8757 {
8758 code = (enum tree_code) va_arg (ap, int);
8759 switch (code)
8760 {
8761 case 0:
8762 	/* This signifies an ellipsis; any further arguments are all OK.  */
8763 res = true;
8764 goto end;
8765 case VOID_TYPE:
8766 /* This signifies an endlink, if no arguments remain, return
8767 true, otherwise return false. */
8768 res = (i == gimple_call_num_args (call));
8769 goto end;
8770 default:
8771 /* If no parameters remain or the parameter's code does not
8772 match the specified code, return false. Otherwise continue
8773 checking any remaining arguments. */
8774 arg = gimple_call_arg (call, i++);
8775 if (!validate_arg (arg, code))
8776 goto end;
8777 break;
8778 }
8779 }
8780 while (1);
8781
8782  /* We need the gotos here so that the single va_end call below is
8783     reached on every exit path.  */
8784 end: ;
8785 va_end (ap);
8786
8787 return res;
8788 }
8789
8790 /* Default target-specific builtin expander that does nothing. */
8791
8792 rtx
8793 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
8794 rtx target ATTRIBUTE_UNUSED,
8795 rtx subtarget ATTRIBUTE_UNUSED,
8796 machine_mode mode ATTRIBUTE_UNUSED,
8797 int ignore ATTRIBUTE_UNUSED)
8798 {
8799 return NULL_RTX;
8800 }
8801
8802 /* Returns true if EXP represents data that would potentially reside
8803 in a readonly section. */
8804
8805 bool
8806 readonly_data_expr (tree exp)
8807 {
8808 STRIP_NOPS (exp);
8809
8810 if (TREE_CODE (exp) != ADDR_EXPR)
8811 return false;
8812
8813 exp = get_base_address (TREE_OPERAND (exp, 0));
8814 if (!exp)
8815 return false;
8816
8817 /* Make sure we call decl_readonly_section only for trees it
8818 can handle (since it returns true for everything it doesn't
8819 understand). */
8820 if (TREE_CODE (exp) == STRING_CST
8821 || TREE_CODE (exp) == CONSTRUCTOR
8822 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
8823 return decl_readonly_section (exp, 0);
8824 else
8825 return false;
8826 }
8827
8828 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
8829 to the call, and TYPE is its return type.
8830
8831 Return NULL_TREE if no simplification was possible, otherwise return the
8832 simplified form of the call as a tree.
8833
8834 The simplified form may be a constant or other expression which
8835 computes the same value, but in a more efficient manner (including
8836 calls to other builtin functions).
8837
8838 The call may contain arguments which need to be evaluated, but
8839 which are not useful to determine the result of the call. In
8840 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8841 COMPOUND_EXPR will be an argument which must be evaluated.
8842 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8843 COMPOUND_EXPR in the chain will contain the tree for the simplified
8844 form of the builtin function call. */
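/* Illustrative examples of the folds performed below: strstr (s, "")
   becomes s (converted to TYPE), strstr (s, "c") becomes strchr (s, 'c')
   when the strchr decl is available, and two constant string arguments
   fold at compile time to either a constant offset into S1 or a null
   pointer.  */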
8845
8846 static tree
8847 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
8848 {
8849 if (!validate_arg (s1, POINTER_TYPE)
8850 || !validate_arg (s2, POINTER_TYPE))
8851 return NULL_TREE;
8852 else
8853 {
8854 tree fn;
8855 const char *p1, *p2;
8856
8857 p2 = c_getstr (s2);
8858 if (p2 == NULL)
8859 return NULL_TREE;
8860
8861 p1 = c_getstr (s1);
8862 if (p1 != NULL)
8863 {
8864 const char *r = strstr (p1, p2);
8865 tree tem;
8866
8867 if (r == NULL)
8868 return build_int_cst (TREE_TYPE (s1), 0);
8869
8870 /* Return an offset into the constant string argument. */
8871 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
8872 return fold_convert_loc (loc, type, tem);
8873 }
8874
8875 /* The argument is const char *, and the result is char *, so we need
8876 a type conversion here to avoid a warning. */
8877 if (p2[0] == '\0')
8878 return fold_convert_loc (loc, type, s1);
8879
8880 if (p2[1] != '\0')
8881 return NULL_TREE;
8882
8883 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
8884 if (!fn)
8885 return NULL_TREE;
8886
8887 /* New argument list transforming strstr(s1, s2) to
8888 strchr(s1, s2[0]). */
8889 return build_call_expr_loc (loc, fn, 2, s1,
8890 build_int_cst (integer_type_node, p2[0]));
8891 }
8892 }
8893
8894 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
8895 the call, and TYPE is its return type.
8896
8897 Return NULL_TREE if no simplification was possible, otherwise return the
8898 simplified form of the call as a tree.
8899
8900 The simplified form may be a constant or other expression which
8901 computes the same value, but in a more efficient manner (including
8902 calls to other builtin functions).
8903
8904 The call may contain arguments which need to be evaluated, but
8905 which are not useful to determine the result of the call. In
8906 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8907 COMPOUND_EXPR will be an argument which must be evaluated.
8908 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8909 COMPOUND_EXPR in the chain will contain the tree for the simplified
8910 form of the builtin function call. */
8911
8912 static tree
8913 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
8914 {
8915 if (!validate_arg (s1, POINTER_TYPE)
8916 || !validate_arg (s2, INTEGER_TYPE))
8917 return NULL_TREE;
8918 else
8919 {
8920 const char *p1;
8921
8922 if (TREE_CODE (s2) != INTEGER_CST)
8923 return NULL_TREE;
8924
8925 p1 = c_getstr (s1);
8926 if (p1 != NULL)
8927 {
8928 char c;
8929 const char *r;
8930 tree tem;
8931
8932 if (target_char_cast (s2, &c))
8933 return NULL_TREE;
8934
8935 r = strchr (p1, c);
8936
8937 if (r == NULL)
8938 return build_int_cst (TREE_TYPE (s1), 0);
8939
8940 /* Return an offset into the constant string argument. */
8941 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
8942 return fold_convert_loc (loc, type, tem);
8943 }
8944 return NULL_TREE;
8945 }
8946 }
8947
8948 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
8949 the call, and TYPE is its return type.
8950
8951 Return NULL_TREE if no simplification was possible, otherwise return the
8952 simplified form of the call as a tree.
8953
8954 The simplified form may be a constant or other expression which
8955 computes the same value, but in a more efficient manner (including
8956 calls to other builtin functions).
8957
8958 The call may contain arguments which need to be evaluated, but
8959 which are not useful to determine the result of the call. In
8960 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8961 COMPOUND_EXPR will be an argument which must be evaluated.
8962 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8963 COMPOUND_EXPR in the chain will contain the tree for the simplified
8964 form of the builtin function call. */
8965
8966 static tree
8967 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
8968 {
8969 if (!validate_arg (s1, POINTER_TYPE)
8970 || !validate_arg (s2, INTEGER_TYPE))
8971 return NULL_TREE;
8972 else
8973 {
8974 tree fn;
8975 const char *p1;
8976
8977 if (TREE_CODE (s2) != INTEGER_CST)
8978 return NULL_TREE;
8979
8980 p1 = c_getstr (s1);
8981 if (p1 != NULL)
8982 {
8983 char c;
8984 const char *r;
8985 tree tem;
8986
8987 if (target_char_cast (s2, &c))
8988 return NULL_TREE;
8989
8990 r = strrchr (p1, c);
8991
8992 if (r == NULL)
8993 return build_int_cst (TREE_TYPE (s1), 0);
8994
8995 /* Return an offset into the constant string argument. */
8996 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
8997 return fold_convert_loc (loc, type, tem);
8998 }
8999
9000 if (! integer_zerop (s2))
9001 return NULL_TREE;
9002
9003 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
9004 if (!fn)
9005 return NULL_TREE;
9006
9007 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
9008 return build_call_expr_loc (loc, fn, 2, s1, s2);
9009 }
9010 }
9011
9012 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
9013 to the call, and TYPE is its return type.
9014
9015 Return NULL_TREE if no simplification was possible, otherwise return the
9016 simplified form of the call as a tree.
9017
9018 The simplified form may be a constant or other expression which
9019 computes the same value, but in a more efficient manner (including
9020 calls to other builtin functions).
9021
9022 The call may contain arguments which need to be evaluated, but
9023 which are not useful to determine the result of the call. In
9024 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9025 COMPOUND_EXPR will be an argument which must be evaluated.
9026 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9027 COMPOUND_EXPR in the chain will contain the tree for the simplified
9028 form of the builtin function call. */
9029
9030 static tree
9031 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
9032 {
9033 if (!validate_arg (s1, POINTER_TYPE)
9034 || !validate_arg (s2, POINTER_TYPE))
9035 return NULL_TREE;
9036 else
9037 {
9038 tree fn;
9039 const char *p1, *p2;
9040
9041 p2 = c_getstr (s2);
9042 if (p2 == NULL)
9043 return NULL_TREE;
9044
9045 p1 = c_getstr (s1);
9046 if (p1 != NULL)
9047 {
9048 const char *r = strpbrk (p1, p2);
9049 tree tem;
9050
9051 if (r == NULL)
9052 return build_int_cst (TREE_TYPE (s1), 0);
9053
9054 /* Return an offset into the constant string argument. */
9055 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
9056 return fold_convert_loc (loc, type, tem);
9057 }
9058
9059 if (p2[0] == '\0')
9060 /* strpbrk(x, "") == NULL.
9061 Evaluate and ignore s1 in case it had side-effects. */
9062 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
9063
9064 if (p2[1] != '\0')
9065 return NULL_TREE; /* Really call strpbrk. */
9066
9067 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
9068 if (!fn)
9069 return NULL_TREE;
9070
9071 /* New argument list transforming strpbrk(s1, s2) to
9072 strchr(s1, s2[0]). */
9073 return build_call_expr_loc (loc, fn, 2, s1,
9074 build_int_cst (integer_type_node, p2[0]));
9075 }
9076 }
9077
9078 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
9079 to the call.
9080
9081 Return NULL_TREE if no simplification was possible, otherwise return the
9082 simplified form of the call as a tree.
9083
9084 The simplified form may be a constant or other expression which
9085 computes the same value, but in a more efficient manner (including
9086 calls to other builtin functions).
9087
9088 The call may contain arguments which need to be evaluated, but
9089 which are not useful to determine the result of the call. In
9090 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9091 COMPOUND_EXPR will be an argument which must be evaluated.
9092 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9093 COMPOUND_EXPR in the chain will contain the tree for the simplified
9094 form of the builtin function call. */
9095
9096 static tree
9097 fold_builtin_strspn (location_t loc, tree s1, tree s2)
9098 {
9099 if (!validate_arg (s1, POINTER_TYPE)
9100 || !validate_arg (s2, POINTER_TYPE))
9101 return NULL_TREE;
9102 else
9103 {
9104 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
9105
9106       /* If either argument is "", the result is zero.  */
9107 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
9108 /* Evaluate and ignore both arguments in case either one has
9109 side-effects. */
9110 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
9111 s1, s2);
9112 return NULL_TREE;
9113 }
9114 }
9115
9116 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
9117 to the call.
9118
9119 Return NULL_TREE if no simplification was possible, otherwise return the
9120 simplified form of the call as a tree.
9121
9122 The simplified form may be a constant or other expression which
9123 computes the same value, but in a more efficient manner (including
9124 calls to other builtin functions).
9125
9126 The call may contain arguments which need to be evaluated, but
9127 which are not useful to determine the result of the call. In
9128 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9129 COMPOUND_EXPR will be an argument which must be evaluated.
9130 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9131 COMPOUND_EXPR in the chain will contain the tree for the simplified
9132 form of the builtin function call. */
9133
9134 static tree
9135 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
9136 {
9137 if (!validate_arg (s1, POINTER_TYPE)
9138 || !validate_arg (s2, POINTER_TYPE))
9139 return NULL_TREE;
9140 else
9141 {
9142       /* If the first argument is "", the result is zero.  */
9143 const char *p1 = c_getstr (s1);
9144 if (p1 && *p1 == '\0')
9145 {
9146 /* Evaluate and ignore argument s2 in case it has
9147 side-effects. */
9148 return omit_one_operand_loc (loc, size_type_node,
9149 size_zero_node, s2);
9150 }
9151
9152 /* If the second argument is "", return __builtin_strlen(s1). */
9153 const char *p2 = c_getstr (s2);
9154 if (p2 && *p2 == '\0')
9155 {
9156 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
9157
9158 /* If the replacement _DECL isn't initialized, don't do the
9159 transformation. */
9160 if (!fn)
9161 return NULL_TREE;
9162
9163 return build_call_expr_loc (loc, fn, 1, s1);
9164 }
9165 return NULL_TREE;
9166 }
9167 }
9168
9169 /* Fold the next_arg or va_start call EXP.  Return true if an error was
9170    produced, false otherwise.  This is done so that we don't output the
9171    error or warning more than once.  */
9172
9173 bool
9174 fold_builtin_next_arg (tree exp, bool va_start_p)
9175 {
9176 tree fntype = TREE_TYPE (current_function_decl);
9177 int nargs = call_expr_nargs (exp);
9178 tree arg;
9179   /* There is a good chance the current input_location points inside the
9180      definition of the va_start macro (perhaps on the token for the
9181      builtin) in a system header, so warnings would not be emitted.
9182      Use the location in real source code instead.  */
9183 source_location current_location =
9184 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
9185 NULL);
9186
9187 if (!stdarg_p (fntype))
9188 {
9189 error ("%<va_start%> used in function with fixed args");
9190 return true;
9191 }
9192
9193 if (va_start_p)
9194 {
9195       if (nargs != 2)
9196 {
9197 error ("wrong number of arguments to function %<va_start%>");
9198 return true;
9199 }
9200 arg = CALL_EXPR_ARG (exp, 1);
9201 }
9202 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
9203 when we checked the arguments and if needed issued a warning. */
9204 else
9205 {
9206 if (nargs == 0)
9207 {
9208 /* Evidently an out of date version of <stdarg.h>; can't validate
9209 va_start's second argument, but can still work as intended. */
9210 warning_at (current_location,
9211 OPT_Wvarargs,
9212 "%<__builtin_next_arg%> called without an argument");
9213 return true;
9214 }
9215 else if (nargs > 1)
9216 {
9217 error ("wrong number of arguments to function %<__builtin_next_arg%>");
9218 return true;
9219 }
9220 arg = CALL_EXPR_ARG (exp, 0);
9221 }
9222
9223 if (TREE_CODE (arg) == SSA_NAME)
9224 arg = SSA_NAME_VAR (arg);
9225
9226 /* We destructively modify the call to be __builtin_va_start (ap, 0)
9227 or __builtin_next_arg (0) the first time we see it, after checking
9228 the arguments and if needed issuing a warning. */
9229 if (!integer_zerop (arg))
9230 {
9231 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
9232
9233 /* Strip off all nops for the sake of the comparison. This
9234 is not quite the same as STRIP_NOPS. It does more.
9235 We must also strip off INDIRECT_EXPR for C++ reference
9236 parameters. */
9237 while (CONVERT_EXPR_P (arg)
9238 || TREE_CODE (arg) == INDIRECT_REF)
9239 arg = TREE_OPERAND (arg, 0);
9240 if (arg != last_parm)
9241 {
9242 	  /* FIXME: Sometimes with the tree optimizers we can end up with
9243 	     something other than the last argument even though the user
9244 	     used the last argument.  We just warn and set the arg to be
9245 	     the last argument so that we will not get wrong code because
9246 	     of it.  */
9247 warning_at (current_location,
9248 OPT_Wvarargs,
9249 "second parameter of %<va_start%> not last named argument");
9250 }
9251
9252 /* Undefined by C99 7.15.1.4p4 (va_start):
9253 "If the parameter parmN is declared with the register storage
9254 class, with a function or array type, or with a type that is
9255 not compatible with the type that results after application of
9256 the default argument promotions, the behavior is undefined."
9257 */
9258 else if (DECL_REGISTER (arg))
9259 {
9260 warning_at (current_location,
9261 OPT_Wvarargs,
9262 "undefined behavior when second parameter of "
9263 "%<va_start%> is declared with %<register%> storage");
9264 }
9265
9266 /* We want to verify the second parameter just once before the tree
9267 optimizers are run and then avoid keeping it in the tree,
9268 as otherwise we could warn even for correct code like:
9269 void foo (int i, ...)
9270 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
9271 if (va_start_p)
9272 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
9273 else
9274 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
9275 }
9276 return false;
9277 }
9278
9279
9280 /* Expand a call EXP to __builtin_object_size. */
9281
9282 static rtx
9283 expand_builtin_object_size (tree exp)
9284 {
9285 tree ost;
9286 int object_size_type;
9287 tree fndecl = get_callee_fndecl (exp);
9288
9289 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
9290 {
9291 error ("%Kfirst argument of %D must be a pointer, second integer constant",
9292 exp, fndecl);
9293 expand_builtin_trap ();
9294 return const0_rtx;
9295 }
9296
9297 ost = CALL_EXPR_ARG (exp, 1);
9298 STRIP_NOPS (ost);
9299
9300 if (TREE_CODE (ost) != INTEGER_CST
9301 || tree_int_cst_sgn (ost) < 0
9302 || compare_tree_int (ost, 3) > 0)
9303 {
9304 error ("%Klast argument of %D is not integer constant between 0 and 3",
9305 exp, fndecl);
9306 expand_builtin_trap ();
9307 return const0_rtx;
9308 }
9309
9310 object_size_type = tree_to_shwi (ost);
9311
9312 return object_size_type < 2 ? constm1_rtx : const0_rtx;
9313 }
9314
9315 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
9316 FCODE is the BUILT_IN_* to use.
9317 Return NULL_RTX if we failed; the caller should emit a normal call,
9318 otherwise try to get the result in TARGET, if convenient (and in
9319 mode MODE if that's convenient). */
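/* For instance, a call such as

     __builtin___memcpy_chk (dst, src, 16, __builtin_object_size (dst, 0))

   is expanded as a plain memcpy when the length is a compile-time constant
   that does not exceed the known object size, or when the object size is
   unknown ((size_t) -1).  If the length is known to exceed the size, a
   "will always overflow" warning is emitted and a normal call is left to
   be generated.  */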
9320
9321 static rtx
9322 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
9323 enum built_in_function fcode)
9324 {
9325 tree dest, src, len, size;
9326
9327 if (!validate_arglist (exp,
9328 POINTER_TYPE,
9329 fcode == BUILT_IN_MEMSET_CHK
9330 ? INTEGER_TYPE : POINTER_TYPE,
9331 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
9332 return NULL_RTX;
9333
9334 dest = CALL_EXPR_ARG (exp, 0);
9335 src = CALL_EXPR_ARG (exp, 1);
9336 len = CALL_EXPR_ARG (exp, 2);
9337 size = CALL_EXPR_ARG (exp, 3);
9338
9339 if (! tree_fits_uhwi_p (size))
9340 return NULL_RTX;
9341
9342 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
9343 {
9344 tree fn;
9345
9346 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
9347 {
9348 warning_at (tree_nonartificial_location (exp),
9349 0, "%Kcall to %D will always overflow destination buffer",
9350 exp, get_callee_fndecl (exp));
9351 return NULL_RTX;
9352 }
9353
9354 fn = NULL_TREE;
9355 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
9356 mem{cpy,pcpy,move,set} is available. */
9357 switch (fcode)
9358 {
9359 case BUILT_IN_MEMCPY_CHK:
9360 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
9361 break;
9362 case BUILT_IN_MEMPCPY_CHK:
9363 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
9364 break;
9365 case BUILT_IN_MEMMOVE_CHK:
9366 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
9367 break;
9368 case BUILT_IN_MEMSET_CHK:
9369 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
9370 break;
9371 default:
9372 break;
9373 }
9374
9375 if (! fn)
9376 return NULL_RTX;
9377
9378 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
9379 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9380 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
9381 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9382 }
9383 else if (fcode == BUILT_IN_MEMSET_CHK)
9384 return NULL_RTX;
9385 else
9386 {
9387 unsigned int dest_align = get_pointer_alignment (dest);
9388
9389 /* If DEST is not a pointer type, call the normal function. */
9390 if (dest_align == 0)
9391 return NULL_RTX;
9392
9393 /* If SRC and DEST are the same (and not volatile), do nothing. */
9394 if (operand_equal_p (src, dest, 0))
9395 {
9396 tree expr;
9397
9398 if (fcode != BUILT_IN_MEMPCPY_CHK)
9399 {
9400 /* Evaluate and ignore LEN in case it has side-effects. */
9401 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
9402 return expand_expr (dest, target, mode, EXPAND_NORMAL);
9403 }
9404
9405 expr = fold_build_pointer_plus (dest, len);
9406 return expand_expr (expr, target, mode, EXPAND_NORMAL);
9407 }
9408
9409 /* __memmove_chk special case. */
9410 if (fcode == BUILT_IN_MEMMOVE_CHK)
9411 {
9412 unsigned int src_align = get_pointer_alignment (src);
9413
9414 if (src_align == 0)
9415 return NULL_RTX;
9416
9417 /* If src is categorized for a readonly section we can use
9418 normal __memcpy_chk. */
9419 if (readonly_data_expr (src))
9420 {
9421 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
9422 if (!fn)
9423 return NULL_RTX;
9424 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
9425 dest, src, len, size);
9426 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9427 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
9428 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9429 }
9430 }
9431 return NULL_RTX;
9432 }
9433 }
9434
9435 /* Emit warning if a buffer overflow is detected at compile time. */
9436
9437 static void
9438 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
9439 {
9440 int is_strlen = 0;
9441 tree len, size;
9442 location_t loc = tree_nonartificial_location (exp);
9443
9444 switch (fcode)
9445 {
9446 case BUILT_IN_STRCPY_CHK:
9447 case BUILT_IN_STPCPY_CHK:
9448 /* For __strcat_chk the warning will be emitted only if overflowing
9449 by at least strlen (dest) + 1 bytes. */
9450 case BUILT_IN_STRCAT_CHK:
9451 len = CALL_EXPR_ARG (exp, 1);
9452 size = CALL_EXPR_ARG (exp, 2);
9453 is_strlen = 1;
9454 break;
9455 case BUILT_IN_STRNCAT_CHK:
9456 case BUILT_IN_STRNCPY_CHK:
9457 case BUILT_IN_STPNCPY_CHK:
9458 len = CALL_EXPR_ARG (exp, 2);
9459 size = CALL_EXPR_ARG (exp, 3);
9460 break;
9461 case BUILT_IN_SNPRINTF_CHK:
9462 case BUILT_IN_VSNPRINTF_CHK:
9463 len = CALL_EXPR_ARG (exp, 1);
9464 size = CALL_EXPR_ARG (exp, 3);
9465 break;
9466 default:
9467 gcc_unreachable ();
9468 }
9469
9470 if (!len || !size)
9471 return;
9472
9473 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
9474 return;
9475
9476 if (is_strlen)
9477 {
9478 len = c_strlen (len, 1);
9479 if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
9480 return;
9481 }
9482 else if (fcode == BUILT_IN_STRNCAT_CHK)
9483 {
9484 tree src = CALL_EXPR_ARG (exp, 1);
9485 if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
9486 return;
9487 src = c_strlen (src, 1);
9488 if (! src || ! tree_fits_uhwi_p (src))
9489 {
9490 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
9491 exp, get_callee_fndecl (exp));
9492 return;
9493 }
9494 else if (tree_int_cst_lt (src, size))
9495 return;
9496 }
9497 else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
9498 return;
9499
9500 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
9501 exp, get_callee_fndecl (exp));
9502 }
9503
9504 /* Emit warning if a buffer overflow is detected at compile time
9505 in __sprintf_chk/__vsprintf_chk calls. */
9506
9507 static void
9508 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
9509 {
9510 tree size, len, fmt;
9511 const char *fmt_str;
9512 int nargs = call_expr_nargs (exp);
9513
9514 /* Verify the required arguments in the original call. */
9515
9516 if (nargs < 4)
9517 return;
9518 size = CALL_EXPR_ARG (exp, 2);
9519 fmt = CALL_EXPR_ARG (exp, 3);
9520
9521 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
9522 return;
9523
9524 /* Check whether the format is a literal string constant. */
9525 fmt_str = c_getstr (fmt);
9526 if (fmt_str == NULL)
9527 return;
9528
9529 if (!init_target_chars ())
9530 return;
9531
9532 /* If the format doesn't contain % args or %%, we know its size. */
9533 if (strchr (fmt_str, target_percent) == 0)
9534 len = build_int_cstu (size_type_node, strlen (fmt_str));
9535 /* If the format is "%s" and first ... argument is a string literal,
9536 we know it too. */
9537 else if (fcode == BUILT_IN_SPRINTF_CHK
9538 && strcmp (fmt_str, target_percent_s) == 0)
9539 {
9540 tree arg;
9541
9542 if (nargs < 5)
9543 return;
9544 arg = CALL_EXPR_ARG (exp, 4);
9545 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
9546 return;
9547
9548 len = c_strlen (arg, 1);
9549 if (!len || ! tree_fits_uhwi_p (len))
9550 return;
9551 }
9552 else
9553 return;
9554
9555 if (! tree_int_cst_lt (len, size))
9556 warning_at (tree_nonartificial_location (exp),
9557 0, "%Kcall to %D will always overflow destination buffer",
9558 exp, get_callee_fndecl (exp));
9559 }
9560
9561 /* Emit warning if a free is called with address of a variable. */
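/* For example, code like

     int x;
     free (&x);

   triggers the -Wfree-nonheap-object warning emitted here.  */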
9562
9563 static void
9564 maybe_emit_free_warning (tree exp)
9565 {
9566 tree arg = CALL_EXPR_ARG (exp, 0);
9567
9568 STRIP_NOPS (arg);
9569 if (TREE_CODE (arg) != ADDR_EXPR)
9570 return;
9571
9572 arg = get_base_address (TREE_OPERAND (arg, 0));
9573 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
9574 return;
9575
9576 if (SSA_VAR_P (arg))
9577 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
9578 "%Kattempt to free a non-heap object %qD", exp, arg);
9579 else
9580 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
9581 "%Kattempt to free a non-heap object", exp);
9582 }
9583
9584 /* Fold a call to __builtin_object_size with arguments PTR and OST,
9585 if possible. */
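/* For example, given char buf[64], __builtin_object_size (&buf[16], 0)
   folds to 48, whereas a pointer whose target cannot be determined folds
   to (size_t) -1 for OST 0 and 1 and to 0 for OST 2 and 3 (that fallback
   is what expand_builtin_object_size above emits when folding never
   succeeded).  */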
9586
9587 static tree
9588 fold_builtin_object_size (tree ptr, tree ost)
9589 {
9590 unsigned HOST_WIDE_INT bytes;
9591 int object_size_type;
9592
9593 if (!validate_arg (ptr, POINTER_TYPE)
9594 || !validate_arg (ost, INTEGER_TYPE))
9595 return NULL_TREE;
9596
9597 STRIP_NOPS (ost);
9598
9599 if (TREE_CODE (ost) != INTEGER_CST
9600 || tree_int_cst_sgn (ost) < 0
9601 || compare_tree_int (ost, 3) > 0)
9602 return NULL_TREE;
9603
9604 object_size_type = tree_to_shwi (ost);
9605
9606 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
9607 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
9608 and (size_t) 0 for types 2 and 3. */
9609 if (TREE_SIDE_EFFECTS (ptr))
9610 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
9611
9612 if (TREE_CODE (ptr) == ADDR_EXPR)
9613 {
9614 compute_builtin_object_size (ptr, object_size_type, &bytes);
9615 if (wi::fits_to_tree_p (bytes, size_type_node))
9616 return build_int_cstu (size_type_node, bytes);
9617 }
9618 else if (TREE_CODE (ptr) == SSA_NAME)
9619 {
9620 /* If object size is not known yet, delay folding until
9621 later. Maybe subsequent passes will help determining
9622 it. */
9623 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
9624 && wi::fits_to_tree_p (bytes, size_type_node))
9625 return build_int_cstu (size_type_node, bytes);
9626 }
9627
9628 return NULL_TREE;
9629 }
9630
9631 /* Builtins with folding operations that operate on "..." arguments
9632 need special handling; we need to store the arguments in a convenient
9633 data structure before attempting any folding. Fortunately there are
9634 only a few builtins that fall into this category. FNDECL is the
9635 function, EXP is the CALL_EXPR for the call. */
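/* E.g. __builtin_fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL, FP_SUBNORMAL,
   FP_ZERO, x) takes a variable argument list; when the classification of x
   is known at compile time the call folds to the matching one of its first
   five arguments.  */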
9636
9637 static tree
9638 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
9639 {
9640 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9641 tree ret = NULL_TREE;
9642
9643 switch (fcode)
9644 {
9645 case BUILT_IN_FPCLASSIFY:
9646 ret = fold_builtin_fpclassify (loc, args, nargs);
9647 break;
9648
9649 default:
9650 break;
9651 }
9652 if (ret)
9653 {
9654 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9655 SET_EXPR_LOCATION (ret, loc);
9656 TREE_NO_WARNING (ret) = 1;
9657 return ret;
9658 }
9659 return NULL_TREE;
9660 }
9661
9662 /* Initialize format string characters in the target charset. */
9663
9664 bool
9665 init_target_chars (void)
9666 {
9667 static bool init;
9668 if (!init)
9669 {
9670 target_newline = lang_hooks.to_target_charset ('\n');
9671 target_percent = lang_hooks.to_target_charset ('%');
9672 target_c = lang_hooks.to_target_charset ('c');
9673 target_s = lang_hooks.to_target_charset ('s');
9674 if (target_newline == 0 || target_percent == 0 || target_c == 0
9675 || target_s == 0)
9676 return false;
9677
9678 target_percent_c[0] = target_percent;
9679 target_percent_c[1] = target_c;
9680 target_percent_c[2] = '\0';
9681
9682 target_percent_s[0] = target_percent;
9683 target_percent_s[1] = target_s;
9684 target_percent_s[2] = '\0';
9685
9686 target_percent_s_newline[0] = target_percent;
9687 target_percent_s_newline[1] = target_s;
9688 target_percent_s_newline[2] = target_newline;
9689 target_percent_s_newline[3] = '\0';
9690
9691 init = true;
9692 }
9693 return true;
9694 }
9695
9696 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
9697 and no overflow/underflow occurred. INEXACT is true if M was not
9698 exactly calculated. TYPE is the tree type for the result. This
9699 function assumes that you cleared the MPFR flags and then
9700 calculated M to see if anything subsequently set a flag prior to
9701 entering this function. Return NULL_TREE if any checks fail. */
9702
9703 static tree
9704 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
9705 {
9706 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
9707 overflow/underflow occurred. If -frounding-math, proceed iff the
9708 result of calling FUNC was exact. */
9709 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
9710 && (!flag_rounding_math || !inexact))
9711 {
9712 REAL_VALUE_TYPE rr;
9713
9714 real_from_mpfr (&rr, m, type, GMP_RNDN);
9715 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
9716 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
9717 	 but the mpfr_t is not, then we underflowed in the
9718 conversion. */
9719 if (real_isfinite (&rr)
9720 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
9721 {
9722 REAL_VALUE_TYPE rmode;
9723
9724 real_convert (&rmode, TYPE_MODE (type), &rr);
9725 /* Proceed iff the specified mode can hold the value. */
9726 if (real_identical (&rmode, &rr))
9727 return build_real (type, rmode);
9728 }
9729 }
9730 return NULL_TREE;
9731 }
9732
9733 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
9734 number and no overflow/underflow occurred. INEXACT is true if M
9735 was not exactly calculated. TYPE is the tree type for the result.
9736 This function assumes that you cleared the MPFR flags and then
9737 calculated M to see if anything subsequently set a flag prior to
9738 entering this function. Return NULL_TREE if any checks fail, if
9739 FORCE_CONVERT is true, then bypass the checks. */
9740
9741 static tree
9742 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
9743 {
9744 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
9745 overflow/underflow occurred. If -frounding-math, proceed iff the
9746 result of calling FUNC was exact. */
9747 if (force_convert
9748 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
9749 && !mpfr_overflow_p () && !mpfr_underflow_p ()
9750 && (!flag_rounding_math || !inexact)))
9751 {
9752 REAL_VALUE_TYPE re, im;
9753
9754 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
9755 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
9756 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
9757 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
9758 	 but the mpfr_t is not, then we underflowed in the
9759 conversion. */
9760 if (force_convert
9761 || (real_isfinite (&re) && real_isfinite (&im)
9762 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
9763 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
9764 {
9765 REAL_VALUE_TYPE re_mode, im_mode;
9766
9767 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
9768 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
9769 /* Proceed iff the specified mode can hold the value. */
9770 if (force_convert
9771 || (real_identical (&re_mode, &re)
9772 && real_identical (&im_mode, &im)))
9773 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
9774 build_real (TREE_TYPE (type), im_mode));
9775 }
9776 }
9777 return NULL_TREE;
9778 }
9779
9780 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
9781 the pointer *(ARG_QUO) and return the result. The type is taken
9782 from the type of ARG0 and is used for setting the precision of the
9783 calculation and results. */
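/* Illustrative example (round-to-nearest): folding remquo (5.0, 3.0, &q)
   yields the REAL_CST -1.0 combined, via a COMPOUND_EXPR, with an
   assignment storing 2 through Q.  */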
9784
9785 static tree
9786 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
9787 {
9788 tree const type = TREE_TYPE (arg0);
9789 tree result = NULL_TREE;
9790
9791 STRIP_NOPS (arg0);
9792 STRIP_NOPS (arg1);
9793
9794 /* To proceed, MPFR must exactly represent the target floating point
9795 format, which only happens when the target base equals two. */
9796 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
9797 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9798 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
9799 {
9800 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
9801 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
9802
9803 if (real_isfinite (ra0) && real_isfinite (ra1))
9804 {
9805 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
9806 const int prec = fmt->p;
9807 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
9808 tree result_rem;
9809 long integer_quo;
9810 mpfr_t m0, m1;
9811
9812 mpfr_inits2 (prec, m0, m1, NULL);
9813 mpfr_from_real (m0, ra0, GMP_RNDN);
9814 mpfr_from_real (m1, ra1, GMP_RNDN);
9815 mpfr_clear_flags ();
9816 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
9817 /* Remquo is independent of the rounding mode, so pass
9818 inexact=0 to do_mpfr_ckconv(). */
9819 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
9820 mpfr_clears (m0, m1, NULL);
9821 if (result_rem)
9822 {
9823 /* MPFR calculates quo in the host's long so it may
9824 return more bits in quo than the target int can hold
9825 if sizeof(host long) > sizeof(target int). This can
9826 happen even for native compilers in LP64 mode. In
9827 these cases, modulo the quo value with the largest
9828 number that the target int can hold while leaving one
9829 bit for the sign. */
9830 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
9831 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
9832
9833 /* Dereference the quo pointer argument. */
9834 arg_quo = build_fold_indirect_ref (arg_quo);
9835 /* Proceed iff a valid pointer type was passed in. */
9836 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
9837 {
9838 /* Set the value. */
9839 tree result_quo
9840 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
9841 build_int_cst (TREE_TYPE (arg_quo),
9842 integer_quo));
9843 TREE_SIDE_EFFECTS (result_quo) = 1;
9844 /* Combine the quo assignment with the rem. */
9845 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
9846 result_quo, result_rem));
9847 }
9848 }
9849 }
9850 }
9851 return result;
9852 }
9853
9854 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
9855 resulting value as a tree with type TYPE. The mpfr precision is
9856 set to the precision of TYPE. We assume that this mpfr function
9857 returns zero if the result could be calculated exactly within the
9858 requested precision. In addition, the integer pointer represented
9859 by ARG_SG will be dereferenced and set to the appropriate signgam
9860 (-1,1) value. */
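/* For example, folding lgamma_r (0.5, &sg) yields approximately 0.57236
   (log (sqrt (pi))) and stores 1 through SG, since gamma (0.5) = sqrt (pi)
   is positive.  */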
9861
9862 static tree
9863 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
9864 {
9865 tree result = NULL_TREE;
9866
9867 STRIP_NOPS (arg);
9868
9869 /* To proceed, MPFR must exactly represent the target floating point
9870 format, which only happens when the target base equals two. Also
9871 verify ARG is a constant and that ARG_SG is an int pointer. */
9872 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
9873 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
9874 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
9875 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
9876 {
9877 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
9878
9879 /* In addition to NaN and Inf, the argument cannot be zero or a
9880 negative integer. */
9881 if (real_isfinite (ra)
9882 && ra->cl != rvc_zero
9883 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
9884 {
9885 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
9886 const int prec = fmt->p;
9887 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
9888 int inexact, sg;
9889 mpfr_t m;
9890 tree result_lg;
9891
9892 mpfr_init2 (m, prec);
9893 mpfr_from_real (m, ra, GMP_RNDN);
9894 mpfr_clear_flags ();
9895 inexact = mpfr_lgamma (m, &sg, m, rnd);
9896 result_lg = do_mpfr_ckconv (m, type, inexact);
9897 mpfr_clear (m);
9898 if (result_lg)
9899 {
9900 tree result_sg;
9901
9902 /* Dereference the arg_sg pointer argument. */
9903 arg_sg = build_fold_indirect_ref (arg_sg);
9904 /* Assign the signgam value into *arg_sg. */
9905 result_sg = fold_build2 (MODIFY_EXPR,
9906 TREE_TYPE (arg_sg), arg_sg,
9907 build_int_cst (TREE_TYPE (arg_sg), sg));
9908 TREE_SIDE_EFFECTS (result_sg) = 1;
9909 /* Combine the signgam assignment with the lgamma result. */
9910 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
9911 result_sg, result_lg));
9912 }
9913 }
9914 }
9915
9916 return result;
9917 }
9918
9919 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
9920 mpc function FUNC on it and return the resulting value as a tree
9921 with type TYPE. The mpfr precision is set to the precision of
9922 TYPE. We assume that function FUNC returns zero if the result
9923 could be calculated exactly within the requested precision. If
9924 DO_NONFINITE is true, then fold expressions containing Inf or NaN
9925 in the arguments and/or results. */
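/* A typical caller (see fold-const-call.c) folds cpow with constant
   complex operands by passing mpc_pow as FUNC; on success the result is
   returned as a COMPLEX_CST of type TYPE.  */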
9926
9927 tree
9928 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
9929 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
9930 {
9931 tree result = NULL_TREE;
9932
9933 STRIP_NOPS (arg0);
9934 STRIP_NOPS (arg1);
9935
9936 /* To proceed, MPFR must exactly represent the target floating point
9937 format, which only happens when the target base equals two. */
9938 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
9939 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
9940 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
9941 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
9942 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
9943 {
9944 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
9945 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
9946 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
9947 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
9948
9949 if (do_nonfinite
9950 || (real_isfinite (re0) && real_isfinite (im0)
9951 && real_isfinite (re1) && real_isfinite (im1)))
9952 {
9953 const struct real_format *const fmt =
9954 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
9955 const int prec = fmt->p;
9956 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
9957 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
9958 int inexact;
9959 mpc_t m0, m1;
9960
9961 mpc_init2 (m0, prec);
9962 mpc_init2 (m1, prec);
9963 mpfr_from_real (mpc_realref (m0), re0, rnd);
9964 mpfr_from_real (mpc_imagref (m0), im0, rnd);
9965 mpfr_from_real (mpc_realref (m1), re1, rnd);
9966 mpfr_from_real (mpc_imagref (m1), im1, rnd);
9967 mpfr_clear_flags ();
9968 inexact = func (m0, m0, m1, crnd);
9969 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
9970 mpc_clear (m0);
9971 mpc_clear (m1);
9972 }
9973 }
9974
9975 return result;
9976 }
9977
9978 /* A wrapper function for builtin folding that prevents warnings for
9979 "statement without effect" and the like, caused by removing the
9980 call node earlier than the warning is generated. */
9981
9982 tree
9983 fold_call_stmt (gcall *stmt, bool ignore)
9984 {
9985 tree ret = NULL_TREE;
9986 tree fndecl = gimple_call_fndecl (stmt);
9987 location_t loc = gimple_location (stmt);
9988 if (fndecl
9989 && TREE_CODE (fndecl) == FUNCTION_DECL
9990 && DECL_BUILT_IN (fndecl)
9991 && !gimple_call_va_arg_pack_p (stmt))
9992 {
9993 int nargs = gimple_call_num_args (stmt);
9994 tree *args = (nargs > 0
9995 ? gimple_call_arg_ptr (stmt, 0)
9996 : &error_mark_node);
9997
9998 if (avoid_folding_inline_builtin (fndecl))
9999 return NULL_TREE;
10000 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10001 {
10002 return targetm.fold_builtin (fndecl, nargs, args, ignore);
10003 }
10004 else
10005 {
10006 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10007 if (ret)
10008 {
10009 /* Propagate location information from original call to
10010 expansion of builtin. Otherwise things like
10011 maybe_emit_chk_warning, that operate on the expansion
10012 of a builtin, will use the wrong location information. */
10013 if (gimple_has_location (stmt))
10014 {
10015 tree realret = ret;
10016 if (TREE_CODE (ret) == NOP_EXPR)
10017 realret = TREE_OPERAND (ret, 0);
10018 if (CAN_HAVE_LOCATION_P (realret)
10019 && !EXPR_HAS_LOCATION (realret))
10020 SET_EXPR_LOCATION (realret, loc);
10021 return realret;
10022 }
10023 return ret;
10024 }
10025 }
10026 }
10027 return NULL_TREE;
10028 }
10029
10030 /* Look up the function in builtin_decl that corresponds to DECL
10031 and set ASMSPEC as its user assembler name. DECL must be a
10032 function decl that declares a builtin. */
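/* Hypothetical example: given a declaration such as

     extern int ffs (int) __asm__ ("my_ffs");

   the builtin decl for ffs is renamed to my_ffs, and because ffs can also
   be emitted as a libcall on targets where int is narrower than a word,
   the ffs optab libfunc is redirected as well (see below).  */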
10033
10034 void
10035 set_builtin_user_assembler_name (tree decl, const char *asmspec)
10036 {
10037 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
10038 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
10039 && asmspec != 0);
10040
10041 tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
10042 set_user_assembler_name (builtin, asmspec);
10043
10044 if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
10045 && INT_TYPE_SIZE < BITS_PER_WORD)
10046 {
10047 set_user_assembler_libfunc ("ffs", asmspec);
10048 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE, MODE_INT, 0),
10049 "ffs");
10050 }
10051 }
10052
10053 /* Return true if DECL is a builtin that expands to a constant or similarly
10054 simple code. */
10055 bool
10056 is_simple_builtin (tree decl)
10057 {
10058 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
10059 switch (DECL_FUNCTION_CODE (decl))
10060 {
10061 /* Builtins that expand to constants. */
10062 case BUILT_IN_CONSTANT_P:
10063 case BUILT_IN_EXPECT:
10064 case BUILT_IN_OBJECT_SIZE:
10065 case BUILT_IN_UNREACHABLE:
10066 /* Simple register moves or loads from stack. */
10067 case BUILT_IN_ASSUME_ALIGNED:
10068 case BUILT_IN_RETURN_ADDRESS:
10069 case BUILT_IN_EXTRACT_RETURN_ADDR:
10070 case BUILT_IN_FROB_RETURN_ADDR:
10071 case BUILT_IN_RETURN:
10072 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
10073 case BUILT_IN_FRAME_ADDRESS:
10074 case BUILT_IN_VA_END:
10075 case BUILT_IN_STACK_SAVE:
10076 case BUILT_IN_STACK_RESTORE:
10077 /* Exception state returns or moves registers around. */
10078 case BUILT_IN_EH_FILTER:
10079 case BUILT_IN_EH_POINTER:
10080 case BUILT_IN_EH_COPY_VALUES:
10081 return true;
10082
10083 default:
10084 return false;
10085 }
10086
10087 return false;
10088 }
10089
10090 /* Return true if DECL is a builtin that is not expensive, i.e. it is
10091    most probably expanded inline into reasonably simple code.  This is a
10092    superset of is_simple_builtin.  */
10093 bool
10094 is_inexpensive_builtin (tree decl)
10095 {
10096 if (!decl)
10097 return false;
10098 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
10099 return true;
10100 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
10101 switch (DECL_FUNCTION_CODE (decl))
10102 {
10103 case BUILT_IN_ABS:
10104 case BUILT_IN_ALLOCA:
10105 case BUILT_IN_ALLOCA_WITH_ALIGN:
10106 case BUILT_IN_BSWAP16:
10107 case BUILT_IN_BSWAP32:
10108 case BUILT_IN_BSWAP64:
10109 case BUILT_IN_CLZ:
10110 case BUILT_IN_CLZIMAX:
10111 case BUILT_IN_CLZL:
10112 case BUILT_IN_CLZLL:
10113 case BUILT_IN_CTZ:
10114 case BUILT_IN_CTZIMAX:
10115 case BUILT_IN_CTZL:
10116 case BUILT_IN_CTZLL:
10117 case BUILT_IN_FFS:
10118 case BUILT_IN_FFSIMAX:
10119 case BUILT_IN_FFSL:
10120 case BUILT_IN_FFSLL:
10121 case BUILT_IN_IMAXABS:
10122 case BUILT_IN_FINITE:
10123 case BUILT_IN_FINITEF:
10124 case BUILT_IN_FINITEL:
10125 case BUILT_IN_FINITED32:
10126 case BUILT_IN_FINITED64:
10127 case BUILT_IN_FINITED128:
10128 case BUILT_IN_FPCLASSIFY:
10129 case BUILT_IN_ISFINITE:
10130 case BUILT_IN_ISINF_SIGN:
10131 case BUILT_IN_ISINF:
10132 case BUILT_IN_ISINFF:
10133 case BUILT_IN_ISINFL:
10134 case BUILT_IN_ISINFD32:
10135 case BUILT_IN_ISINFD64:
10136 case BUILT_IN_ISINFD128:
10137 case BUILT_IN_ISNAN:
10138 case BUILT_IN_ISNANF:
10139 case BUILT_IN_ISNANL:
10140 case BUILT_IN_ISNAND32:
10141 case BUILT_IN_ISNAND64:
10142 case BUILT_IN_ISNAND128:
10143 case BUILT_IN_ISNORMAL:
10144 case BUILT_IN_ISGREATER:
10145 case BUILT_IN_ISGREATEREQUAL:
10146 case BUILT_IN_ISLESS:
10147 case BUILT_IN_ISLESSEQUAL:
10148 case BUILT_IN_ISLESSGREATER:
10149 case BUILT_IN_ISUNORDERED:
10150 case BUILT_IN_VA_ARG_PACK:
10151 case BUILT_IN_VA_ARG_PACK_LEN:
10152 case BUILT_IN_VA_COPY:
10153 case BUILT_IN_TRAP:
10154 case BUILT_IN_SAVEREGS:
10155 case BUILT_IN_POPCOUNTL:
10156 case BUILT_IN_POPCOUNTLL:
10157 case BUILT_IN_POPCOUNTIMAX:
10158 case BUILT_IN_POPCOUNT:
10159 case BUILT_IN_PARITYL:
10160 case BUILT_IN_PARITYLL:
10161 case BUILT_IN_PARITYIMAX:
10162 case BUILT_IN_PARITY:
10163 case BUILT_IN_LABS:
10164 case BUILT_IN_LLABS:
10165 case BUILT_IN_PREFETCH:
10166 case BUILT_IN_ACC_ON_DEVICE:
10167 return true;
10168
10169 default:
10170 return is_simple_builtin (decl);
10171 }
10172
10173 return false;
10174 }